diff --git a/buf.gen.yaml b/buf.gen.yaml index fce4d534c..78467f082 100644 --- a/buf.gen.yaml +++ b/buf.gen.yaml @@ -6,6 +6,10 @@ plugins: out: ./packages/jumpstarter-protocol/jumpstarter_protocol - remote: buf.build/grpc/python out: ./packages/jumpstarter-protocol/jumpstarter_protocol + - remote: buf.build/community/nipunn1313-mypy:v3.7.0 + out: ./packages/jumpstarter-protocol/jumpstarter_protocol + - remote: buf.build/community/nipunn1313-mypy-grpc:v3.7.0 + out: ./packages/jumpstarter-protocol/jumpstarter_protocol inputs: - git_repo: https://github.com/jumpstarter-dev/jumpstarter-protocol.git branch: main diff --git a/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py b/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py index 7331c3b5b..5d4c2d9c8 100644 --- a/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py +++ b/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/create_test.py @@ -234,7 +234,9 @@ def test_create_client( "name": "{name}-credential" }}, "devices": [], - "endpoint": "{endpoint}" + "endpoint": "{endpoint}", + "exporterStatus": null, + "statusMessage": null }} }} """.format(name=EXPORTER_NAME, endpoint=EXPORTER_ENDPOINT) @@ -250,6 +252,8 @@ def test_create_client( name: {name}-credential devices: [] endpoint: {endpoint} + exporterStatus: null + statusMessage: null """.format(name=EXPORTER_NAME, endpoint=EXPORTER_ENDPOINT) diff --git a/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py b/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py index 4b12a807b..a79ff7d0f 100644 --- a/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py +++ b/packages/jumpstarter-cli-admin/jumpstarter_cli_admin/get_test.py @@ -301,7 +301,10 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): kind="Exporter", metadata=V1ObjectMeta(name="test", namespace="testing", creation_timestamp="2024-01-01T21:00:00Z"), status=V1Alpha1ExporterStatus( - 
endpoint="grpc://example.com:443", credential=V1ObjectReference(name="test-credential"), devices=[] + endpoint="grpc://example.com:443", + credential=V1ObjectReference(name="test-credential"), + devices=[], + exporter_status="Available", ), ) @@ -318,7 +321,9 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): "name": "test-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } """ @@ -334,6 +339,8 @@ def test_get_clients(_load_kube_config_mock, list_clients_mock: AsyncMock): name: test-credential devices: [] endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null """ @@ -348,6 +355,7 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): result = runner.invoke(get, ["exporter", "test"]) assert result.exit_code == 0 assert "test" in result.output + assert "Available" in result.output assert "grpc://example.com:443" in result.output get_exporter_mock.reset_mock() @@ -396,6 +404,7 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="82a8ac0d-d7ff-4009-8948-18a3c5c607b1"), V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="f7cd30ac-64a3-42c6-ba31-b25f033b97c1"), ], + exporter_status="Available", ), ) @@ -425,7 +434,9 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): "uuid": "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" } ], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } """ @@ -447,6 +458,8 @@ def test_get_exporter(_load_kube_config_mock, get_exporter_mock: AsyncMock): hardware: rpi4 uuid: f7cd30ac-64a3-42c6-ba31-b25f033b97c1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null """ @@ -460,6 +473,7 @@ def 
test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo result = runner.invoke(get, ["exporter", "test", "--devices"]) assert result.exit_code == 0 assert "test" in result.output + assert "Available" in result.output assert "grpc://example.com:443" in result.output assert "hardware:rpi4" in result.output assert "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" in result.output @@ -510,6 +524,7 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo endpoint="grpc://example.com:443", credential=V1ObjectReference(name="test-credential"), devices=[], + exporter_status="Available", ), ), V1Alpha1Exporter( @@ -520,6 +535,7 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo endpoint="grpc://example.com:443", credential=V1ObjectReference(name="another-credential"), devices=[], + exporter_status="Available", ), ), ] @@ -541,7 +557,9 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo "name": "test-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } }, { @@ -557,7 +575,9 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo "name": "another-credential" }, "devices": [], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } ], @@ -578,6 +598,8 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo name: test-credential devices: [] endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null - apiVersion: jumpstarter.dev/v1alpha1 kind: Exporter metadata: @@ -589,6 +611,8 @@ def test_get_exporter_devices(_load_kube_config_mock, get_exporter_mock: AsyncMo name: another-credential devices: [] endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null kind: ExporterList """ @@ 
-609,6 +633,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): assert result.exit_code == 0 assert "test" in result.output assert "another" in result.output + assert "Available" in result.output list_exporters_mock.reset_mock() # List exporters JSON output @@ -655,6 +680,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): devices=[ V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="82a8ac0d-d7ff-4009-8948-18a3c5c607b1") ], + exporter_status="Available", ), ), V1Alpha1Exporter( @@ -667,6 +693,7 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): devices=[ V1Alpha1ExporterDevice(labels={"hardware": "rpi4"}, uuid="f7cd30ac-64a3-42c6-ba31-b25f033b97c1"), ], + exporter_status="Available", ), ), ] @@ -695,7 +722,9 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): "uuid": "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" } ], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } }, { @@ -718,7 +747,9 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): "uuid": "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" } ], - "endpoint": "grpc://example.com:443" + "endpoint": "grpc://example.com:443", + "exporterStatus": "Available", + "statusMessage": null } } ], @@ -742,6 +773,8 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): hardware: rpi4 uuid: 82a8ac0d-d7ff-4009-8948-18a3c5c607b1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null - apiVersion: jumpstarter.dev/v1alpha1 kind: Exporter metadata: @@ -756,6 +789,8 @@ def test_get_exporters(_load_kube_config_mock, list_exporters_mock: AsyncMock): hardware: rpi4 uuid: f7cd30ac-64a3-42c6-ba31-b25f033b97c1 endpoint: grpc://example.com:443 + exporterStatus: Available + statusMessage: null kind: ExporterList """ @@ -774,6 +809,7 @@ def 
test_get_exporters_devices(_load_kube_config_mock, list_exporters_mock: Asyn assert result.exit_code == 0 assert "test" in result.output assert "another" in result.output + assert "Available" in result.output assert "hardware:rpi4" in result.output assert "82a8ac0d-d7ff-4009-8948-18a3c5c607b1" in result.output assert "f7cd30ac-64a3-42c6-ba31-b25f033b97c1" in result.output diff --git a/packages/jumpstarter-cli/jumpstarter_cli/get.py b/packages/jumpstarter-cli/jumpstarter_cli/get.py index f7d1a041b..869dfd061 100644 --- a/packages/jumpstarter-cli/jumpstarter_cli/get.py +++ b/packages/jumpstarter-cli/jumpstarter_cli/get.py @@ -21,8 +21,8 @@ def get(): @opt_output_all @opt_comma_separated( "with", - {"leases", "online"}, - help_text="Include fields: leases, online (comma-separated or repeated)" + {"leases", "online", "status"}, + help_text="Include fields: leases, online, status (comma-separated or repeated)", ) @handle_exceptions_with_reauthentication(relogin_client) def get_exporters(config, selector: str | None, output: OutputType, with_options: list[str]): @@ -32,7 +32,10 @@ def get_exporters(config, selector: str | None, output: OutputType, with_options include_leases = "leases" in with_options include_online = "online" in with_options - exporters = config.list_exporters(filter=selector, include_leases=include_leases, include_online=include_online) + include_status = "status" in with_options + exporters = config.list_exporters( + filter=selector, include_leases=include_leases, include_online=include_online, include_status=include_status + ) model_print(exporters, output) diff --git a/packages/jumpstarter-cli/jumpstarter_cli/run.py b/packages/jumpstarter-cli/jumpstarter_cli/run.py index 50f9606e1..3650b63ab 100644 --- a/packages/jumpstarter-cli/jumpstarter_cli/run.py +++ b/packages/jumpstarter-cli/jumpstarter_cli/run.py @@ -76,11 +76,22 @@ async def signal_handler(): except* Exception as excgroup: _handle_exporter_exceptions(excgroup) + # Check if exporter set an 
exit code (e.g., from hook failure with on_failure='exit') + exporter_exit_code = exporter.exit_code + # Cancel the signal handler after exporter completes signal_tg.cancel_scope.cancel() - # Return signal number if received, otherwise 0 for immediate restart - return received_signal if received_signal else 0 + # Return exit code in priority order: + # 1. Signal number if received (for signal-based termination) + # 2. Exporter's exit code if set (for hook failure with on_failure='exit') + # 3. 0 for immediate restart (normal exit without signal or explicit exit code) + if received_signal: + return received_signal + elif exporter_exit_code is not None: + return exporter_exit_code + else: + return 0 sys.exit(anyio.run(serve_with_graceful_shutdown)) diff --git a/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py b/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py index 004c47ffb..1ea45006f 100644 --- a/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py +++ b/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters.py @@ -26,6 +26,8 @@ class V1Alpha1ExporterStatus(JsonBaseModel): credential: SerializeV1ObjectReference devices: list[V1Alpha1ExporterDevice] endpoint: str + exporter_status: str | None = Field(alias="exporterStatus", default=None) + status_message: str | None = Field(alias="statusMessage", default=None) class V1Alpha1Exporter(JsonBaseModel): @@ -55,6 +57,8 @@ def from_dict(dict: dict): devices=[V1Alpha1ExporterDevice(labels=d["labels"], uuid=d["uuid"]) for d in dict["status"]["devices"]] if "devices" in dict["status"] else [], + exporter_status=dict["status"].get("exporterStatus"), + status_message=dict["status"].get("statusMessage"), ), ) @@ -62,17 +66,20 @@ def from_dict(dict: dict): def rich_add_columns(cls, table, devices: bool = False): if devices: table.add_column("NAME", no_wrap=True) + table.add_column("STATUS") table.add_column("ENDPOINT") table.add_column("AGE") 
table.add_column("LABELS") table.add_column("UUID") else: table.add_column("NAME", no_wrap=True) + table.add_column("STATUS") table.add_column("ENDPOINT") table.add_column("DEVICES") table.add_column("AGE") def rich_add_rows(self, table, devices: bool = False): + status = self.status.exporter_status if self.status else "Unknown" if devices: if self.status is not None: for d in self.status.devices: @@ -82,6 +89,7 @@ def rich_add_rows(self, table, devices: bool = False): labels.append(f"{label}:{str(d.labels[label])}") table.add_row( self.metadata.name, + status or "Unknown", self.status.endpoint, time_since(self.metadata.creation_timestamp), ",".join(labels), @@ -91,6 +99,7 @@ def rich_add_rows(self, table, devices: bool = False): else: table.add_row( self.metadata.name, + status or "Unknown", self.status.endpoint, str(len(self.status.devices) if self.status and self.status.devices else 0), time_since(self.metadata.creation_timestamp), diff --git a/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py b/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py index 1792a0f3b..683f23547 100644 --- a/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py +++ b/packages/jumpstarter-kubernetes/jumpstarter_kubernetes/exporters_test.py @@ -47,7 +47,9 @@ def test_exporter_dump_json(): "uuid": "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" } ], - "endpoint": "https://test-exporter" + "endpoint": "https://test-exporter", + "exporterStatus": null, + "statusMessage": null } }""" ) @@ -73,6 +75,8 @@ def test_exporter_dump_yaml(): test: label uuid: f4cf49ab-fc64-46c6-94e7-a40502eb77b1 endpoint: https://test-exporter + exporterStatus: null + statusMessage: null """ ) @@ -113,8 +117,9 @@ def test_exporter_rich_add_columns_without_devices(): mock_table = MagicMock() V1Alpha1Exporter.rich_add_columns(mock_table, devices=False) - assert mock_table.add_column.call_count == 4 + assert mock_table.add_column.call_count == 5 
mock_table.add_column.assert_any_call("NAME", no_wrap=True) + mock_table.add_column.assert_any_call("STATUS") mock_table.add_column.assert_any_call("ENDPOINT") mock_table.add_column.assert_any_call("DEVICES") mock_table.add_column.assert_any_call("AGE") @@ -128,8 +133,9 @@ def test_exporter_rich_add_columns_with_devices(): mock_table = MagicMock() V1Alpha1Exporter.rich_add_columns(mock_table, devices=True) - assert mock_table.add_column.call_count == 5 + assert mock_table.add_column.call_count == 6 mock_table.add_column.assert_any_call("NAME", no_wrap=True) + mock_table.add_column.assert_any_call("STATUS") mock_table.add_column.assert_any_call("ENDPOINT") mock_table.add_column.assert_any_call("AGE") mock_table.add_column.assert_any_call("LABELS") @@ -146,9 +152,10 @@ def test_exporter_rich_add_rows_without_devices(): mock_table.add_row.assert_called_once() args = mock_table.add_row.call_args[0] assert args[0] == "test-exporter" - assert args[1] == "https://test-exporter" - assert args[2] == "1" # Number of devices - assert args[3] == "5m" # Age + assert args[1] == "Unknown" # Status (shows "Unknown" when exporter_status is None) + assert args[2] == "https://test-exporter" + assert args[3] == "1" # Number of devices + assert args[4] == "5m" # Age def test_exporter_rich_add_rows_with_devices(): @@ -161,10 +168,11 @@ def test_exporter_rich_add_rows_with_devices(): mock_table.add_row.assert_called_once() args = mock_table.add_row.call_args[0] assert args[0] == "test-exporter" - assert args[1] == "https://test-exporter" - assert args[2] == "5m" # Age - assert args[3] == "test:label" # Labels - assert args[4] == "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" # UUID + assert args[1] == "Unknown" # Status (shows "Unknown" when exporter_status is None) + assert args[2] == "https://test-exporter" + assert args[3] == "5m" # Age + assert args[4] == "test:label" # Labels + assert args[5] == "f4cf49ab-fc64-46c6-94e7-a40502eb77b1" # UUID def test_exporter_rich_add_names(): @@ -212,7 
+220,7 @@ def test_exporter_list_rich_add_columns(): mock_table = MagicMock() V1Alpha1ExporterList.rich_add_columns(mock_table, devices=False) - assert mock_table.add_column.call_count == 4 + assert mock_table.add_column.call_count == 5 def test_exporter_list_rich_add_columns_with_devices(): @@ -223,7 +231,7 @@ def test_exporter_list_rich_add_columns_with_devices(): mock_table = MagicMock() V1Alpha1ExporterList.rich_add_columns(mock_table, devices=True) - assert mock_table.add_column.call_count == 5 + assert mock_table.add_column.call_count == 6 def test_exporter_list_rich_add_rows(): diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi new file mode 100644 index 000000000..500b13794 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2.pyi @@ -0,0 +1,318 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +Copyright 2024 The Jumpstarter Authors +(-- api-linter: core::0215::foreign-type-reference=disabled +(-- api-linter: core::0192::has-comments=disabled +(-- api-linter: core::0191::java-package=disabled +(-- api-linter: core::0191::java-outer-classname=disabled +(-- api-linter: core::0191::java-multiple-files=disabled +""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.field_mask_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.timestamp_pb2 +import jumpstarter.v1.common_pb2 +import jumpstarter.v1.kubernetes_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class Exporter(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + NAME_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + ONLINE_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + name: builtins.str + online: builtins.bool + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... 
+ def __init__( + self, + *, + name: builtins.str = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + online: builtins.bool = ..., + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["labels", b"labels", "name", b"name", "online", b"online", "status", b"status"]) -> None: ... + +Global___Exporter: typing_extensions.TypeAlias = Exporter + +@typing.final +class Lease(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + DURATION_FIELD_NUMBER: builtins.int + EFFECTIVE_DURATION_FIELD_NUMBER: builtins.int + BEGIN_TIME_FIELD_NUMBER: builtins.int + EFFECTIVE_BEGIN_TIME_FIELD_NUMBER: builtins.int + END_TIME_FIELD_NUMBER: builtins.int + EFFECTIVE_END_TIME_FIELD_NUMBER: builtins.int + CLIENT_FIELD_NUMBER: builtins.int + EXPORTER_FIELD_NUMBER: builtins.int + CONDITIONS_FIELD_NUMBER: builtins.int + name: builtins.str + selector: builtins.str + client: builtins.str + exporter: builtins.str + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def effective_duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def effective_begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def effective_end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def conditions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[jumpstarter.v1.kubernetes_pb2.Condition]: ... 
+ def __init__( + self, + *, + name: builtins.str = ..., + selector: builtins.str = ..., + duration: google.protobuf.duration_pb2.Duration | None = ..., + effective_duration: google.protobuf.duration_pb2.Duration | None = ..., + begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + effective_begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + effective_end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + client: builtins.str | None = ..., + exporter: builtins.str | None = ..., + conditions: collections.abc.Iterable[jumpstarter.v1.kubernetes_pb2.Condition] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_client", b"_client", "_duration", b"_duration", "_effective_begin_time", b"_effective_begin_time", "_effective_end_time", b"_effective_end_time", "_end_time", b"_end_time", "_exporter", b"_exporter", "begin_time", b"begin_time", "client", b"client", "duration", b"duration", "effective_begin_time", b"effective_begin_time", "effective_duration", b"effective_duration", "effective_end_time", b"effective_end_time", "end_time", b"end_time", "exporter", b"exporter"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_client", b"_client", "_duration", b"_duration", "_effective_begin_time", b"_effective_begin_time", "_effective_end_time", b"_effective_end_time", "_end_time", b"_end_time", "_exporter", b"_exporter", "begin_time", b"begin_time", "client", b"client", "conditions", b"conditions", "duration", b"duration", "effective_begin_time", b"effective_begin_time", "effective_duration", b"effective_duration", "effective_end_time", b"effective_end_time", "end_time", b"end_time", "exporter", b"exporter", "name", b"name", "selector", b"selector"]) -> None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_begin_time", b"_begin_time"]) -> typing.Literal["begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_client", b"_client"]) -> typing.Literal["client"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_duration", b"_duration"]) -> typing.Literal["duration"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_effective_begin_time", b"_effective_begin_time"]) -> typing.Literal["effective_begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_effective_end_time", b"_effective_end_time"]) -> typing.Literal["effective_end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_end_time", b"_end_time"]) -> typing.Literal["end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_exporter", b"_exporter"]) -> typing.Literal["exporter"] | None: ... + +Global___Lease: typing_extensions.TypeAlias = Lease + +@typing.final +class GetExporterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... 
+ +Global___GetExporterRequest: typing_extensions.TypeAlias = GetExporterRequest + +@typing.final +class ListExportersRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + PAGE_SIZE_FIELD_NUMBER: builtins.int + PAGE_TOKEN_FIELD_NUMBER: builtins.int + FILTER_FIELD_NUMBER: builtins.int + parent: builtins.str + page_size: builtins.int + page_token: builtins.str + filter: builtins.str + def __init__( + self, + *, + parent: builtins.str = ..., + page_size: builtins.int = ..., + page_token: builtins.str = ..., + filter: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["filter", b"filter", "page_size", b"page_size", "page_token", b"page_token", "parent", b"parent"]) -> None: ... + +Global___ListExportersRequest: typing_extensions.TypeAlias = ListExportersRequest + +@typing.final +class ListExportersResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPORTERS_FIELD_NUMBER: builtins.int + NEXT_PAGE_TOKEN_FIELD_NUMBER: builtins.int + next_page_token: builtins.str + @property + def exporters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Exporter]: ... + def __init__( + self, + *, + exporters: collections.abc.Iterable[Global___Exporter] | None = ..., + next_page_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["exporters", b"exporters", "next_page_token", b"next_page_token"]) -> None: ... + +Global___ListExportersResponse: typing_extensions.TypeAlias = ListExportersResponse + +@typing.final +class GetLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... 
+ +Global___GetLeaseRequest: typing_extensions.TypeAlias = GetLeaseRequest + +@typing.final +class ListLeasesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + PAGE_SIZE_FIELD_NUMBER: builtins.int + PAGE_TOKEN_FIELD_NUMBER: builtins.int + FILTER_FIELD_NUMBER: builtins.int + ONLY_ACTIVE_FIELD_NUMBER: builtins.int + parent: builtins.str + page_size: builtins.int + page_token: builtins.str + filter: builtins.str + only_active: builtins.bool + def __init__( + self, + *, + parent: builtins.str = ..., + page_size: builtins.int = ..., + page_token: builtins.str = ..., + filter: builtins.str = ..., + only_active: builtins.bool | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_only_active", b"_only_active", "only_active", b"only_active"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_only_active", b"_only_active", "filter", b"filter", "only_active", b"only_active", "page_size", b"page_size", "page_token", b"page_token", "parent", b"parent"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_only_active", b"_only_active"]) -> typing.Literal["only_active"] | None: ... + +Global___ListLeasesRequest: typing_extensions.TypeAlias = ListLeasesRequest + +@typing.final +class ListLeasesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASES_FIELD_NUMBER: builtins.int + NEXT_PAGE_TOKEN_FIELD_NUMBER: builtins.int + next_page_token: builtins.str + @property + def leases(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Lease]: ... + def __init__( + self, + *, + leases: collections.abc.Iterable[Global___Lease] | None = ..., + next_page_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["leases", b"leases", "next_page_token", b"next_page_token"]) -> None: ... 
+ +Global___ListLeasesResponse: typing_extensions.TypeAlias = ListLeasesResponse + +@typing.final +class CreateLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PARENT_FIELD_NUMBER: builtins.int + LEASE_ID_FIELD_NUMBER: builtins.int + LEASE_FIELD_NUMBER: builtins.int + parent: builtins.str + lease_id: builtins.str + @property + def lease(self) -> Global___Lease: ... + def __init__( + self, + *, + parent: builtins.str = ..., + lease_id: builtins.str = ..., + lease: Global___Lease | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["lease", b"lease"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["lease", b"lease", "lease_id", b"lease_id", "parent", b"parent"]) -> None: ... + +Global___CreateLeaseRequest: typing_extensions.TypeAlias = CreateLeaseRequest + +@typing.final +class UpdateLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_FIELD_NUMBER: builtins.int + UPDATE_MASK_FIELD_NUMBER: builtins.int + @property + def lease(self) -> Global___Lease: ... + @property + def update_mask(self) -> google.protobuf.field_mask_pb2.FieldMask: ... + def __init__( + self, + *, + lease: Global___Lease | None = ..., + update_mask: google.protobuf.field_mask_pb2.FieldMask | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["lease", b"lease", "update_mask", b"update_mask"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["lease", b"lease", "update_mask", b"update_mask"]) -> None: ... + +Global___UpdateLeaseRequest: typing_extensions.TypeAlias = UpdateLeaseRequest + +@typing.final +class DeleteLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___DeleteLeaseRequest: typing_extensions.TypeAlias = DeleteLeaseRequest diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi new file mode 100644 index 000000000..a5aa7937b --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/client/v1/client_pb2_grpc.pyi @@ -0,0 +1,307 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors +(-- api-linter: core::0215::foreign-type-reference=disabled +(-- api-linter: core::0192::has-comments=disabled +(-- api-linter: core::0191::java-package=disabled +(-- api-linter: core::0191::java-outer-classname=disabled +(-- api-linter: core::0191::java-multiple-files=disabled +""" + +import abc +import collections.abc +import google.protobuf.empty_pb2 +import grpc +import grpc.aio +import jumpstarter.client.v1.client_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... 
+ +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_ClientServiceGetExporterType = typing_extensions.TypeVar( + '_ClientServiceGetExporterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], +) + +_ClientServiceListExportersType = typing_extensions.TypeVar( + '_ClientServiceListExportersType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], +) + +_ClientServiceGetLeaseType = typing_extensions.TypeVar( + '_ClientServiceGetLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceListLeasesType = typing_extensions.TypeVar( + '_ClientServiceListLeasesType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + 
jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], +) + +_ClientServiceCreateLeaseType = typing_extensions.TypeVar( + '_ClientServiceCreateLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceUpdateLeaseType = typing_extensions.TypeVar( + '_ClientServiceUpdateLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], +) + +_ClientServiceDeleteLeaseType = typing_extensions.TypeVar( + '_ClientServiceDeleteLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], +) + +class ClientServiceStub(typing.Generic[_ClientServiceGetExporterType, _ClientServiceListExportersType, _ClientServiceGetLeaseType, _ClientServiceListLeasesType, _ClientServiceCreateLeaseType, _ClientServiceUpdateLeaseType, _ClientServiceDeleteLeaseType]): + 
@typing.overload + def __init__(self: ClientServiceStub[ + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + ], channel: grpc.Channel) -> None: ... 
+ + @typing.overload + def __init__(self: ClientServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + GetExporter: _ClientServiceGetExporterType + + ListExporters: _ClientServiceListExportersType + + GetLease: _ClientServiceGetLeaseType + + ListLeases: _ClientServiceListLeasesType + + CreateLease: _ClientServiceCreateLeaseType + + UpdateLease: _ClientServiceUpdateLeaseType + + DeleteLease: _ClientServiceDeleteLeaseType + +ClientServiceAsyncStub: typing_extensions.TypeAlias = ClientServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetExporterRequest, + jumpstarter.client.v1.client_pb2.Exporter, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListExportersRequest, + jumpstarter.client.v1.client_pb2.ListExportersResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.GetLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.ListLeasesRequest, + jumpstarter.client.v1.client_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + jumpstarter.client.v1.client_pb2.Lease, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + google.protobuf.empty_pb2.Empty, + ], +] + +class ClientServiceServicer(metaclass=abc.ABCMeta): + @abc.abstractmethod + def GetExporter( + self, + request: jumpstarter.client.v1.client_pb2.GetExporterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Exporter, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Exporter]]: ... 
+ + @abc.abstractmethod + def ListExporters( + self, + request: jumpstarter.client.v1.client_pb2.ListExportersRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.ListExportersResponse, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.ListExportersResponse]]: ... + + @abc.abstractmethod + def GetLease( + self, + request: jumpstarter.client.v1.client_pb2.GetLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def ListLeases( + self, + request: jumpstarter.client.v1.client_pb2.ListLeasesRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.ListLeasesResponse, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.ListLeasesResponse]]: ... + + @abc.abstractmethod + def CreateLease( + self, + request: jumpstarter.client.v1.client_pb2.CreateLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def UpdateLease( + self, + request: jumpstarter.client.v1.client_pb2.UpdateLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.client.v1.client_pb2.Lease, collections.abc.Awaitable[jumpstarter.client.v1.client_pb2.Lease]]: ... + + @abc.abstractmethod + def DeleteLease( + self, + request: jumpstarter.client.v1.client_pb2.DeleteLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[google.protobuf.empty_pb2.Empty, collections.abc.Awaitable[google.protobuf.empty_pb2.Empty]]: ... + +def add_ClientServiceServicer_to_server(servicer: ClientServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi new file mode 100644 index 000000000..f433f1db2 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2.pyi @@ -0,0 +1,96 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.enum_type_wrapper +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _ExporterStatus: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _ExporterStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ExporterStatus.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + EXPORTER_STATUS_UNSPECIFIED: _ExporterStatus.ValueType # 0 + """Unspecified exporter status""" + EXPORTER_STATUS_OFFLINE: _ExporterStatus.ValueType # 1 + """Exporter is offline""" + EXPORTER_STATUS_AVAILABLE: _ExporterStatus.ValueType # 2 + """Exporter is available to be leased""" + EXPORTER_STATUS_BEFORE_LEASE_HOOK: _ExporterStatus.ValueType # 3 + """Exporter is executing before lease hook(s)""" + EXPORTER_STATUS_LEASE_READY: _ExporterStatus.ValueType # 4 + """Exporter is leased and ready to accept commands""" + EXPORTER_STATUS_AFTER_LEASE_HOOK: _ExporterStatus.ValueType # 5 + """Exporter is executing after lease hook(s)""" + EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED: _ExporterStatus.ValueType # 6 + """Exporter before lease hook failed""" + EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED: _ExporterStatus.ValueType # 7 + """Exporter after lease hook failed""" + +class ExporterStatus(_ExporterStatus, 
metaclass=_ExporterStatusEnumTypeWrapper): + """Shared types used across multiple Jumpstarter services + + Exporter status information + """ + +EXPORTER_STATUS_UNSPECIFIED: ExporterStatus.ValueType # 0 +"""Unspecified exporter status""" +EXPORTER_STATUS_OFFLINE: ExporterStatus.ValueType # 1 +"""Exporter is offline""" +EXPORTER_STATUS_AVAILABLE: ExporterStatus.ValueType # 2 +"""Exporter is available to be leased""" +EXPORTER_STATUS_BEFORE_LEASE_HOOK: ExporterStatus.ValueType # 3 +"""Exporter is executing before lease hook(s)""" +EXPORTER_STATUS_LEASE_READY: ExporterStatus.ValueType # 4 +"""Exporter is leased and ready to accept commands""" +EXPORTER_STATUS_AFTER_LEASE_HOOK: ExporterStatus.ValueType # 5 +"""Exporter is executing after lease hook(s)""" +EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED: ExporterStatus.ValueType # 6 +"""Exporter before lease hook failed""" +EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED: ExporterStatus.ValueType # 7 +"""Exporter after lease hook failed""" +Global___ExporterStatus: typing_extensions.TypeAlias = ExporterStatus + +class _LogSource: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _LogSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogSource.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + LOG_SOURCE_UNSPECIFIED: _LogSource.ValueType # 0 + """Unspecified log source""" + LOG_SOURCE_DRIVER: _LogSource.ValueType # 1 + """Driver/device logs""" + LOG_SOURCE_BEFORE_LEASE_HOOK: _LogSource.ValueType # 2 + """beforeLease hook execution logs""" + LOG_SOURCE_AFTER_LEASE_HOOK: _LogSource.ValueType # 3 + """afterLease hook execution logs""" + LOG_SOURCE_SYSTEM: _LogSource.ValueType # 4 + """System/exporter logs""" + +class LogSource(_LogSource, metaclass=_LogSourceEnumTypeWrapper): + """Source of log stream messages""" + +LOG_SOURCE_UNSPECIFIED: LogSource.ValueType # 0 +"""Unspecified log source""" +LOG_SOURCE_DRIVER: 
LogSource.ValueType # 1 +"""Driver/device logs""" +LOG_SOURCE_BEFORE_LEASE_HOOK: LogSource.ValueType # 2 +"""beforeLease hook execution logs""" +LOG_SOURCE_AFTER_LEASE_HOOK: LogSource.ValueType # 3 +"""afterLease hook execution logs""" +LOG_SOURCE_SYSTEM: LogSource.ValueType # 4 +"""System/exporter logs""" +Global___LogSource: typing_extensions.TypeAlias = LogSource diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi new file mode 100644 index 000000000..6aac97060 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/common_pb2_grpc.pyi @@ -0,0 +1,20 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import typing + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi new file mode 100644 index 000000000..762c46c62 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2.pyi @@ -0,0 +1,717 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.duration_pb2 +import google.protobuf.internal.containers +import google.protobuf.message +import google.protobuf.struct_pb2 +import google.protobuf.timestamp_pb2 +import jumpstarter.v1.common_pb2 +import jumpstarter.v1.kubernetes_pb2 +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class RegisterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + LABELS_FIELD_NUMBER: builtins.int + REPORTS_FIELD_NUMBER: builtins.int + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """additional context: + - token/authentication mechanism + """ + + @property + def reports(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___DriverInstanceReport]: + """standard labels: + jumpstarter.dev/hostname= + jumpstarter.dev/name= + """ + + def __init__( + self, + *, + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + reports: collections.abc.Iterable[Global___DriverInstanceReport] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["labels", b"labels", "reports", b"reports"]) -> None: ... 
+ +Global___RegisterRequest: typing_extensions.TypeAlias = RegisterRequest + +@typing.final +class DriverInstanceReport(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + @typing.final + class MethodsDescriptionEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... + + UUID_FIELD_NUMBER: builtins.int + PARENT_UUID_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + DESCRIPTION_FIELD_NUMBER: builtins.int + METHODS_DESCRIPTION_FIELD_NUMBER: builtins.int + uuid: builtins.str + """a unique id within the exporter""" + parent_uuid: builtins.str + """optional, if device has a parent device""" + description: builtins.str + """optional custom driver description for CLI""" + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... 
+ @property + def methods_description(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: + """method name -> help text for CLI""" + + def __init__( + self, + *, + uuid: builtins.str = ..., + parent_uuid: builtins.str | None = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + description: builtins.str | None = ..., + methods_description: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_description", b"_description", "_parent_uuid", b"_parent_uuid", "description", b"description", "parent_uuid", b"parent_uuid"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_description", b"_description", "_parent_uuid", b"_parent_uuid", "description", b"description", "labels", b"labels", "methods_description", b"methods_description", "parent_uuid", b"parent_uuid", "uuid", b"uuid"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_description", b"_description"]) -> typing.Literal["description"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_parent_uuid", b"_parent_uuid"]) -> typing.Literal["parent_uuid"] | None: ... + +Global___DriverInstanceReport: typing_extensions.TypeAlias = DriverInstanceReport + +@typing.final +class RegisterResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + uuid: builtins.str + def __init__( + self, + *, + uuid: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["uuid", b"uuid"]) -> None: ... 
+ +Global___RegisterResponse: typing_extensions.TypeAlias = RegisterResponse + +@typing.final +class UnregisterRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + REASON_FIELD_NUMBER: builtins.int + reason: builtins.str + def __init__( + self, + *, + reason: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["reason", b"reason"]) -> None: ... + +Global___UnregisterRequest: typing_extensions.TypeAlias = UnregisterRequest + +@typing.final +class UnregisterResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___UnregisterResponse: typing_extensions.TypeAlias = UnregisterResponse + +@typing.final +class ListenRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_NAME_FIELD_NUMBER: builtins.int + lease_name: builtins.str + def __init__( + self, + *, + lease_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["lease_name", b"lease_name"]) -> None: ... + +Global___ListenRequest: typing_extensions.TypeAlias = ListenRequest + +@typing.final +class ListenResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROUTER_ENDPOINT_FIELD_NUMBER: builtins.int + ROUTER_TOKEN_FIELD_NUMBER: builtins.int + router_endpoint: builtins.str + router_token: builtins.str + def __init__( + self, + *, + router_endpoint: builtins.str = ..., + router_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["router_endpoint", b"router_endpoint", "router_token", b"router_token"]) -> None: ... + +Global___ListenResponse: typing_extensions.TypeAlias = ListenResponse + +@typing.final +class StatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +Global___StatusRequest: typing_extensions.TypeAlias = StatusRequest + +@typing.final +class StatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASED_FIELD_NUMBER: builtins.int + LEASE_NAME_FIELD_NUMBER: builtins.int + CLIENT_NAME_FIELD_NUMBER: builtins.int + leased: builtins.bool + lease_name: builtins.str + client_name: builtins.str + def __init__( + self, + *, + leased: builtins.bool = ..., + lease_name: builtins.str | None = ..., + client_name: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_client_name", b"_client_name", "_lease_name", b"_lease_name", "client_name", b"client_name", "lease_name", b"lease_name"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_client_name", b"_client_name", "_lease_name", b"_lease_name", "client_name", b"client_name", "lease_name", b"lease_name", "leased", b"leased"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_client_name", b"_client_name"]) -> typing.Literal["client_name"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_lease_name", b"_lease_name"]) -> typing.Literal["lease_name"] | None: ... + +Global___StatusResponse: typing_extensions.TypeAlias = StatusResponse + +@typing.final +class DialRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + LEASE_NAME_FIELD_NUMBER: builtins.int + lease_name: builtins.str + def __init__( + self, + *, + lease_name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["lease_name", b"lease_name"]) -> None: ... 
+ +Global___DialRequest: typing_extensions.TypeAlias = DialRequest + +@typing.final +class DialResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ROUTER_ENDPOINT_FIELD_NUMBER: builtins.int + ROUTER_TOKEN_FIELD_NUMBER: builtins.int + router_endpoint: builtins.str + router_token: builtins.str + def __init__( + self, + *, + router_endpoint: builtins.str = ..., + router_token: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["router_endpoint", b"router_endpoint", "router_token", b"router_token"]) -> None: ... + +Global___DialResponse: typing_extensions.TypeAlias = DialResponse + +@typing.final +class AuditStreamRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + EXPORTER_UUID_FIELD_NUMBER: builtins.int + DRIVER_INSTANCE_UUID_FIELD_NUMBER: builtins.int + SEVERITY_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + exporter_uuid: builtins.str + """additional context: + - token/authentication mechanism + """ + driver_instance_uuid: builtins.str + severity: builtins.str + message: builtins.str + def __init__( + self, + *, + exporter_uuid: builtins.str = ..., + driver_instance_uuid: builtins.str = ..., + severity: builtins.str = ..., + message: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["driver_instance_uuid", b"driver_instance_uuid", "exporter_uuid", b"exporter_uuid", "message", b"message", "severity", b"severity"]) -> None: ... 
+ +Global___AuditStreamRequest: typing_extensions.TypeAlias = AuditStreamRequest + +@typing.final +class ReportStatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + message: builtins.str + """Optional human-readable status message""" + def __init__( + self, + *, + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "status", b"status"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + +Global___ReportStatusRequest: typing_extensions.TypeAlias = ReportStatusRequest + +@typing.final +class ReportStatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ReportStatusResponse: typing_extensions.TypeAlias = ReportStatusResponse + +@typing.final +class GetReportResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class LabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + UUID_FIELD_NUMBER: builtins.int + LABELS_FIELD_NUMBER: builtins.int + REPORTS_FIELD_NUMBER: builtins.int + ALTERNATIVE_ENDPOINTS_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + @property + def reports(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___DriverInstanceReport]: + """standard labels: + jumpstarter.dev/hostname= + jumpstarter.dev/name= + """ + + @property + def alternative_endpoints(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___Endpoint]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + reports: collections.abc.Iterable[Global___DriverInstanceReport] | None = ..., + alternative_endpoints: collections.abc.Iterable[Global___Endpoint] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["alternative_endpoints", b"alternative_endpoints", "labels", b"labels", "reports", b"reports", "uuid", b"uuid"]) -> None: ... + +Global___GetReportResponse: typing_extensions.TypeAlias = GetReportResponse + +@typing.final +class Endpoint(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + ENDPOINT_FIELD_NUMBER: builtins.int + CERTIFICATE_FIELD_NUMBER: builtins.int + CLIENT_CERTIFICATE_FIELD_NUMBER: builtins.int + CLIENT_PRIVATE_KEY_FIELD_NUMBER: builtins.int + endpoint: builtins.str + certificate: builtins.str + client_certificate: builtins.str + client_private_key: builtins.str + def __init__( + self, + *, + endpoint: builtins.str = ..., + certificate: builtins.str = ..., + client_certificate: builtins.str = ..., + client_private_key: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["certificate", b"certificate", "client_certificate", b"client_certificate", "client_private_key", b"client_private_key", "endpoint", b"endpoint"]) -> None: ... + +Global___Endpoint: typing_extensions.TypeAlias = Endpoint + +@typing.final +class DriverCallRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + METHOD_FIELD_NUMBER: builtins.int + ARGS_FIELD_NUMBER: builtins.int + uuid: builtins.str + method: builtins.str + @property + def args(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.struct_pb2.Value]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + method: builtins.str = ..., + args: collections.abc.Iterable[google.protobuf.struct_pb2.Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["args", b"args", "method", b"method", "uuid", b"uuid"]) -> None: ... + +Global___DriverCallRequest: typing_extensions.TypeAlias = DriverCallRequest + +@typing.final +class DriverCallResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + RESULT_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def result(self) -> google.protobuf.struct_pb2.Value: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + result: google.protobuf.struct_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["result", b"result", "uuid", b"uuid"]) -> None: ... 
+ +Global___DriverCallResponse: typing_extensions.TypeAlias = DriverCallResponse + +@typing.final +class StreamingDriverCallRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + METHOD_FIELD_NUMBER: builtins.int + ARGS_FIELD_NUMBER: builtins.int + uuid: builtins.str + method: builtins.str + @property + def args(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.struct_pb2.Value]: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + method: builtins.str = ..., + args: collections.abc.Iterable[google.protobuf.struct_pb2.Value] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["args", b"args", "method", b"method", "uuid", b"uuid"]) -> None: ... + +Global___StreamingDriverCallRequest: typing_extensions.TypeAlias = StreamingDriverCallRequest + +@typing.final +class StreamingDriverCallResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + RESULT_FIELD_NUMBER: builtins.int + uuid: builtins.str + @property + def result(self) -> google.protobuf.struct_pb2.Value: ... + def __init__( + self, + *, + uuid: builtins.str = ..., + result: google.protobuf.struct_pb2.Value | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["result", b"result"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["result", b"result", "uuid", b"uuid"]) -> None: ... 
+ +Global___StreamingDriverCallResponse: typing_extensions.TypeAlias = StreamingDriverCallResponse + +@typing.final +class LogStreamResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + UUID_FIELD_NUMBER: builtins.int + SEVERITY_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + SOURCE_FIELD_NUMBER: builtins.int + uuid: builtins.str + severity: builtins.str + message: builtins.str + source: jumpstarter.v1.common_pb2.LogSource.ValueType + """New optional field""" + def __init__( + self, + *, + uuid: builtins.str = ..., + severity: builtins.str = ..., + message: builtins.str = ..., + source: jumpstarter.v1.common_pb2.LogSource.ValueType | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_source", b"_source", "source", b"source"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_source", b"_source", "message", b"message", "severity", b"severity", "source", b"source", "uuid", b"uuid"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_source", b"_source"]) -> typing.Literal["source"] | None: ... + +Global___LogStreamResponse: typing_extensions.TypeAlias = LogStreamResponse + +@typing.final +class ResetRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ResetRequest: typing_extensions.TypeAlias = ResetRequest + +@typing.final +class ResetResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ResetResponse: typing_extensions.TypeAlias = ResetResponse + +@typing.final +class GetLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___GetLeaseRequest: typing_extensions.TypeAlias = GetLeaseRequest + +@typing.final +class GetLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DURATION_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + BEGIN_TIME_FIELD_NUMBER: builtins.int + END_TIME_FIELD_NUMBER: builtins.int + EXPORTER_UUID_FIELD_NUMBER: builtins.int + CONDITIONS_FIELD_NUMBER: builtins.int + exporter_uuid: builtins.str + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def selector(self) -> jumpstarter.v1.kubernetes_pb2.LabelSelector: ... + @property + def begin_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def end_time(self) -> google.protobuf.timestamp_pb2.Timestamp: ... + @property + def conditions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[jumpstarter.v1.kubernetes_pb2.Condition]: ... + def __init__( + self, + *, + duration: google.protobuf.duration_pb2.Duration | None = ..., + selector: jumpstarter.v1.kubernetes_pb2.LabelSelector | None = ..., + begin_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + end_time: google.protobuf.timestamp_pb2.Timestamp | None = ..., + exporter_uuid: builtins.str | None = ..., + conditions: collections.abc.Iterable[jumpstarter.v1.kubernetes_pb2.Condition] | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_end_time", b"_end_time", "_exporter_uuid", b"_exporter_uuid", "begin_time", b"begin_time", "duration", b"duration", "end_time", b"end_time", "exporter_uuid", b"exporter_uuid", "selector", b"selector"]) -> builtins.bool: ... 
+ def ClearField(self, field_name: typing.Literal["_begin_time", b"_begin_time", "_end_time", b"_end_time", "_exporter_uuid", b"_exporter_uuid", "begin_time", b"begin_time", "conditions", b"conditions", "duration", b"duration", "end_time", b"end_time", "exporter_uuid", b"exporter_uuid", "selector", b"selector"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_begin_time", b"_begin_time"]) -> typing.Literal["begin_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_end_time", b"_end_time"]) -> typing.Literal["end_time"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_exporter_uuid", b"_exporter_uuid"]) -> typing.Literal["exporter_uuid"] | None: ... + +Global___GetLeaseResponse: typing_extensions.TypeAlias = GetLeaseResponse + +@typing.final +class RequestLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DURATION_FIELD_NUMBER: builtins.int + SELECTOR_FIELD_NUMBER: builtins.int + @property + def duration(self) -> google.protobuf.duration_pb2.Duration: ... + @property + def selector(self) -> jumpstarter.v1.kubernetes_pb2.LabelSelector: ... + def __init__( + self, + *, + duration: google.protobuf.duration_pb2.Duration | None = ..., + selector: jumpstarter.v1.kubernetes_pb2.LabelSelector | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["duration", b"duration", "selector", b"selector"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["duration", b"duration", "selector", b"selector"]) -> None: ... + +Global___RequestLeaseRequest: typing_extensions.TypeAlias = RequestLeaseRequest + +@typing.final +class RequestLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... 
+ def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___RequestLeaseResponse: typing_extensions.TypeAlias = RequestLeaseResponse + +@typing.final +class ReleaseLeaseRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAME_FIELD_NUMBER: builtins.int + name: builtins.str + def __init__( + self, + *, + name: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["name", b"name"]) -> None: ... + +Global___ReleaseLeaseRequest: typing_extensions.TypeAlias = ReleaseLeaseRequest + +@typing.final +class ReleaseLeaseResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ReleaseLeaseResponse: typing_extensions.TypeAlias = ReleaseLeaseResponse + +@typing.final +class ListLeasesRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... + +Global___ListLeasesRequest: typing_extensions.TypeAlias = ListLeasesRequest + +@typing.final +class ListLeasesResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + NAMES_FIELD_NUMBER: builtins.int + @property + def names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + names: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["names", b"names"]) -> None: ... + +Global___ListLeasesResponse: typing_extensions.TypeAlias = ListLeasesResponse + +@typing.final +class GetStatusRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + def __init__( + self, + ) -> None: ... 
+ +Global___GetStatusRequest: typing_extensions.TypeAlias = GetStatusRequest + +@typing.final +class GetStatusResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + STATUS_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType + message: builtins.str + def __init__( + self, + *, + status: jumpstarter.v1.common_pb2.ExporterStatus.ValueType = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_message", b"_message", "message", b"message"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_message", b"_message", "message", b"message", "status", b"status"]) -> None: ... + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + +Global___GetStatusResponse: typing_extensions.TypeAlias = GetStatusResponse diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi new file mode 100644 index 000000000..78c9ffbb2 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/jumpstarter_pb2_grpc.pyi @@ -0,0 +1,752 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import google.protobuf.empty_pb2 +import grpc +import grpc.aio +import jumpstarter.v1.jumpstarter_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... 
+ +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_ControllerServiceRegisterType = typing_extensions.TypeVar( + '_ControllerServiceRegisterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], +) + +_ControllerServiceUnregisterType = typing_extensions.TypeVar( + '_ControllerServiceUnregisterType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], +) + +_ControllerServiceReportStatusType = typing_extensions.TypeVar( + '_ControllerServiceReportStatusType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], +) + +_ControllerServiceListenType = typing_extensions.TypeVar( + '_ControllerServiceListenType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + 
jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], +) + +_ControllerServiceStatusType = typing_extensions.TypeVar( + '_ControllerServiceStatusType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], +) + +_ControllerServiceDialType = typing_extensions.TypeVar( + '_ControllerServiceDialType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], +) + +_ControllerServiceAuditStreamType = typing_extensions.TypeVar( + '_ControllerServiceAuditStreamType', + grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + default=grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], +) + +_ControllerServiceGetLeaseType = typing_extensions.TypeVar( + '_ControllerServiceGetLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + 
jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], +) + +_ControllerServiceRequestLeaseType = typing_extensions.TypeVar( + '_ControllerServiceRequestLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], +) + +_ControllerServiceReleaseLeaseType = typing_extensions.TypeVar( + '_ControllerServiceReleaseLeaseType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], +) + +_ControllerServiceListLeasesType = typing_extensions.TypeVar( + '_ControllerServiceListLeasesType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], +) + +class ControllerServiceStub(typing.Generic[_ControllerServiceRegisterType, 
_ControllerServiceUnregisterType, _ControllerServiceReportStatusType, _ControllerServiceListenType, _ControllerServiceStatusType, _ControllerServiceDialType, _ControllerServiceAuditStreamType, _ControllerServiceGetLeaseType, _ControllerServiceRequestLeaseType, _ControllerServiceReleaseLeaseType, _ControllerServiceListLeasesType]): + """A service where a exporter can connect to make itself available""" + + @typing.overload + def __init__(self: ControllerServiceStub[ + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + ], 
channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: ControllerServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + Register: _ControllerServiceRegisterType + """Exporter registration""" + + Unregister: _ControllerServiceUnregisterType + """Exporter disconnection + Disconnecting with bye will invalidate any existing router tokens + we will eventually have a mechanism to tell the router this token + has been invalidated + """ + + ReportStatus: _ControllerServiceReportStatusType + """Exporter status report + Allows exporters to report their own status to the controller + """ + + Listen: _ControllerServiceListenType + """Exporter listening + Returns stream tokens for accepting incoming client connections + """ + + Status: _ControllerServiceStatusType + """Exporter status + Returns lease status for the exporter + """ + + Dial: _ControllerServiceDialType + """Client connecting + Returns stream token for connecting to the desired exporter + Leases are checked before token issuance + """ + + AuditStream: _ControllerServiceAuditStreamType + """Audit events from the exporters + audit events are used to track the exporter's activity + """ + + GetLease: _ControllerServiceGetLeaseType + """Get Lease""" + + RequestLease: _ControllerServiceRequestLeaseType + """Request Lease""" + + ReleaseLease: _ControllerServiceReleaseLeaseType + """Release Lease""" + + ListLeases: _ControllerServiceListLeasesType + """List Leases""" + +ControllerServiceAsyncStub: typing_extensions.TypeAlias = ControllerServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + jumpstarter.v1.jumpstarter_pb2.RegisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListenRequest, + jumpstarter.v1.jumpstarter_pb2.ListenResponse, + ], + 
grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StatusRequest, + jumpstarter.v1.jumpstarter_pb2.StatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DialRequest, + jumpstarter.v1.jumpstarter_pb2.DialResponse, + ], + grpc.aio.StreamUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest, + google.protobuf.empty_pb2.Empty, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, + ], +] + +class ControllerServiceServicer(metaclass=abc.ABCMeta): + """A service where a exporter can connect to make itself available""" + + @abc.abstractmethod + def Register( + self, + request: jumpstarter.v1.jumpstarter_pb2.RegisterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.RegisterResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.RegisterResponse]]: + """Exporter registration""" + + @abc.abstractmethod + def Unregister( + self, + request: jumpstarter.v1.jumpstarter_pb2.UnregisterRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.UnregisterResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.UnregisterResponse]]: + """Exporter disconnection + Disconnecting with bye will invalidate any existing router tokens + we will eventually have a mechanism to tell the router this token + has been invalidated + """ + + @abc.abstractmethod + def ReportStatus( + self, + request: 
jumpstarter.v1.jumpstarter_pb2.ReportStatusRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ReportStatusResponse]]: + """Exporter status report + Allows exporters to report their own status to the controller + """ + + @abc.abstractmethod + def Listen( + self, + request: jumpstarter.v1.jumpstarter_pb2.ListenRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.ListenResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.ListenResponse]]: + """Exporter listening + Returns stream tokens for accepting incoming client connections + """ + + @abc.abstractmethod + def Status( + self, + request: jumpstarter.v1.jumpstarter_pb2.StatusRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.StatusResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.StatusResponse]]: + """Exporter status + Returns lease status for the exporter + """ + + @abc.abstractmethod + def Dial( + self, + request: jumpstarter.v1.jumpstarter_pb2.DialRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.DialResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.DialResponse]]: + """Client connecting + Returns stream token for connecting to the desired exporter + Leases are checked before token issuance + """ + + @abc.abstractmethod + def AuditStream( + self, + request_iterator: _MaybeAsyncIterator[jumpstarter.v1.jumpstarter_pb2.AuditStreamRequest], + context: _ServicerContext, + ) -> typing.Union[google.protobuf.empty_pb2.Empty, collections.abc.Awaitable[google.protobuf.empty_pb2.Empty]]: + """Audit events from the exporters + audit events are used to track the exporter's activity + """ + + @abc.abstractmethod + def GetLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.GetLeaseRequest, + 
context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetLeaseResponse]]: + """Get Lease""" + + @abc.abstractmethod + def RequestLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.RequestLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.RequestLeaseResponse]]: + """Request Lease""" + + @abc.abstractmethod + def ReleaseLease( + self, + request: jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ReleaseLeaseResponse]]: + """Release Lease""" + + @abc.abstractmethod + def ListLeases( + self, + request: jumpstarter.v1.jumpstarter_pb2.ListLeasesRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ListLeasesResponse]]: + """List Leases""" + +def add_ControllerServiceServicer_to_server(servicer: ControllerServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
+ +_ExporterServiceGetReportType = typing_extensions.TypeVar( + '_ExporterServiceGetReportType', + grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], +) + +_ExporterServiceDriverCallType = typing_extensions.TypeVar( + '_ExporterServiceDriverCallType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], +) + +_ExporterServiceStreamingDriverCallType = typing_extensions.TypeVar( + '_ExporterServiceStreamingDriverCallType', + grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + default=grpc.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], +) + +_ExporterServiceLogStreamType = typing_extensions.TypeVar( + '_ExporterServiceLogStreamType', + grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + 
default=grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], +) + +_ExporterServiceResetType = typing_extensions.TypeVar( + '_ExporterServiceResetType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], +) + +_ExporterServiceGetStatusType = typing_extensions.TypeVar( + '_ExporterServiceGetStatusType', + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + default=grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], +) + +class ExporterServiceStub(typing.Generic[_ExporterServiceGetReportType, _ExporterServiceDriverCallType, _ExporterServiceStreamingDriverCallType, _ExporterServiceLogStreamType, _ExporterServiceResetType, _ExporterServiceGetStatusType]): + """A service a exporter can share locally to be used without a server + Channel/Call credentials are used to authenticate the client, and routing to the right exporter + """ + + @typing.overload + def __init__(self: ExporterServiceStub[ + grpc.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.UnaryStreamMultiCallable[ + 
jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + ], channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: ExporterServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + GetReport: _ExporterServiceGetReportType + """Exporter registration""" + + DriverCall: _ExporterServiceDriverCallType + + StreamingDriverCall: _ExporterServiceStreamingDriverCallType + + LogStream: _ExporterServiceLogStreamType + + Reset: _ExporterServiceResetType + + GetStatus: _ExporterServiceGetStatusType + +ExporterServiceAsyncStub: typing_extensions.TypeAlias = ExporterServiceStub[ + grpc.aio.UnaryUnaryMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.GetReportResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse, + ], + grpc.aio.UnaryStreamMultiCallable[ + google.protobuf.empty_pb2.Empty, + jumpstarter.v1.jumpstarter_pb2.LogStreamResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.ResetRequest, + jumpstarter.v1.jumpstarter_pb2.ResetResponse, + ], + grpc.aio.UnaryUnaryMultiCallable[ + jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, + ], +] + +class ExporterServiceServicer(metaclass=abc.ABCMeta): + """A service a exporter can share locally to be used without a server + Channel/Call credentials are used to authenticate the client, and routing to the right exporter + """ + + @abc.abstractmethod + def GetReport( + self, + request: google.protobuf.empty_pb2.Empty, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetReportResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetReportResponse]]: + """Exporter registration""" + + @abc.abstractmethod + def DriverCall( + self, + request: jumpstarter.v1.jumpstarter_pb2.DriverCallRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.DriverCallResponse, 
collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.DriverCallResponse]]: ... + + @abc.abstractmethod + def StreamingDriverCall( + self, + request: jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallRequest, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.StreamingDriverCallResponse]]: ... + + @abc.abstractmethod + def LogStream( + self, + request: google.protobuf.empty_pb2.Empty, + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.jumpstarter_pb2.LogStreamResponse], collections.abc.AsyncIterator[jumpstarter.v1.jumpstarter_pb2.LogStreamResponse]]: ... + + @abc.abstractmethod + def Reset( + self, + request: jumpstarter.v1.jumpstarter_pb2.ResetRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.ResetResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.ResetResponse]]: ... + + @abc.abstractmethod + def GetStatus( + self, + request: jumpstarter.v1.jumpstarter_pb2.GetStatusRequest, + context: _ServicerContext, + ) -> typing.Union[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse, collections.abc.Awaitable[jumpstarter.v1.jumpstarter_pb2.GetStatusResponse]]: ... + +def add_ExporterServiceServicer_to_server(servicer: ExporterServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi new file mode 100644 index 000000000..e07fb6250 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2.pyi @@ -0,0 +1,148 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import collections.abc +import google.protobuf.descriptor +import google.protobuf.internal.containers +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +@typing.final +class LabelSelectorRequirement(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + OPERATOR_FIELD_NUMBER: builtins.int + VALUES_FIELD_NUMBER: builtins.int + key: builtins.str + operator: builtins.str + @property + def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ... + def __init__( + self, + *, + key: builtins.str = ..., + operator: builtins.str = ..., + values: collections.abc.Iterable[builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "operator", b"operator", "values", b"values"]) -> None: ... + +Global___LabelSelectorRequirement: typing_extensions.TypeAlias = LabelSelectorRequirement + +@typing.final +class LabelSelector(google.protobuf.message.Message): + """Reference: https://kubernetes.io/docs/reference/kubernetes-api/common-definitions/label-selector/""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + @typing.final + class MatchLabelsEntry(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + KEY_FIELD_NUMBER: builtins.int + VALUE_FIELD_NUMBER: builtins.int + key: builtins.str + value: builtins.str + def __init__( + self, + *, + key: builtins.str = ..., + value: builtins.str = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["key", b"key", "value", b"value"]) -> None: ... 
+ + MATCH_EXPRESSIONS_FIELD_NUMBER: builtins.int + MATCH_LABELS_FIELD_NUMBER: builtins.int + @property + def match_expressions(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[Global___LabelSelectorRequirement]: ... + @property + def match_labels(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: ... + def __init__( + self, + *, + match_expressions: collections.abc.Iterable[Global___LabelSelectorRequirement] | None = ..., + match_labels: collections.abc.Mapping[builtins.str, builtins.str] | None = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["match_expressions", b"match_expressions", "match_labels", b"match_labels"]) -> None: ... + +Global___LabelSelector: typing_extensions.TypeAlias = LabelSelector + +@typing.final +class Time(google.protobuf.message.Message): + """Reference: https://github.com/kubernetes/kubernetes/blob/v1.31.1/staging/src/k8s.io/apimachinery/pkg/apis/meta/v1/generated.proto""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + SECONDS_FIELD_NUMBER: builtins.int + NANOS_FIELD_NUMBER: builtins.int + seconds: builtins.int + nanos: builtins.int + def __init__( + self, + *, + seconds: builtins.int | None = ..., + nanos: builtins.int | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_nanos", b"_nanos", "_seconds", b"_seconds", "nanos", b"nanos", "seconds", b"seconds"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_nanos", b"_nanos", "_seconds", b"_seconds", "nanos", b"nanos", "seconds", b"seconds"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_nanos", b"_nanos"]) -> typing.Literal["nanos"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_seconds", b"_seconds"]) -> typing.Literal["seconds"] | None: ... 
+ +Global___Time: typing_extensions.TypeAlias = Time + +@typing.final +class Condition(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + TYPE_FIELD_NUMBER: builtins.int + STATUS_FIELD_NUMBER: builtins.int + OBSERVEDGENERATION_FIELD_NUMBER: builtins.int + LASTTRANSITIONTIME_FIELD_NUMBER: builtins.int + REASON_FIELD_NUMBER: builtins.int + MESSAGE_FIELD_NUMBER: builtins.int + type: builtins.str + status: builtins.str + observedGeneration: builtins.int + reason: builtins.str + message: builtins.str + @property + def lastTransitionTime(self) -> Global___Time: ... + def __init__( + self, + *, + type: builtins.str | None = ..., + status: builtins.str | None = ..., + observedGeneration: builtins.int | None = ..., + lastTransitionTime: Global___Time | None = ..., + reason: builtins.str | None = ..., + message: builtins.str | None = ..., + ) -> None: ... + def HasField(self, field_name: typing.Literal["_lastTransitionTime", b"_lastTransitionTime", "_message", b"_message", "_observedGeneration", b"_observedGeneration", "_reason", b"_reason", "_status", b"_status", "_type", b"_type", "lastTransitionTime", b"lastTransitionTime", "message", b"message", "observedGeneration", b"observedGeneration", "reason", b"reason", "status", b"status", "type", b"type"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["_lastTransitionTime", b"_lastTransitionTime", "_message", b"_message", "_observedGeneration", b"_observedGeneration", "_reason", b"_reason", "_status", b"_status", "_type", b"_type", "lastTransitionTime", b"lastTransitionTime", "message", b"message", "observedGeneration", b"observedGeneration", "reason", b"reason", "status", b"status", "type", b"type"]) -> None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_lastTransitionTime", b"_lastTransitionTime"]) -> typing.Literal["lastTransitionTime"] | None: ... 
+ @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_message", b"_message"]) -> typing.Literal["message"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_observedGeneration", b"_observedGeneration"]) -> typing.Literal["observedGeneration"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_reason", b"_reason"]) -> typing.Literal["reason"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_status", b"_status"]) -> typing.Literal["status"] | None: ... + @typing.overload + def WhichOneof(self, oneof_group: typing.Literal["_type", b"_type"]) -> typing.Literal["type"] | None: ... + +Global___Condition: typing_extensions.TypeAlias = Condition diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi new file mode 100644 index 000000000..6aac97060 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/kubernetes_pb2_grpc.pyi @@ -0,0 +1,20 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import typing + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... 
+ +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi new file mode 100644 index 000000000..905fc6b88 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2.pyi @@ -0,0 +1,73 @@ +""" +@generated by mypy-protobuf. Do not edit manually! +isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import builtins +import google.protobuf.descriptor +import google.protobuf.internal.enum_type_wrapper +import google.protobuf.message +import sys +import typing + +if sys.version_info >= (3, 10): + import typing as typing_extensions +else: + import typing_extensions + +DESCRIPTOR: google.protobuf.descriptor.FileDescriptor + +class _FrameType: + ValueType = typing.NewType("ValueType", builtins.int) + V: typing_extensions.TypeAlias = ValueType + +class _FrameTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FrameType.ValueType], builtins.type): + DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor + FRAME_TYPE_DATA: _FrameType.ValueType # 0 + FRAME_TYPE_RST_STREAM: _FrameType.ValueType # 3 + FRAME_TYPE_PING: _FrameType.ValueType # 6 + FRAME_TYPE_GOAWAY: _FrameType.ValueType # 7 + +class FrameType(_FrameType, metaclass=_FrameTypeEnumTypeWrapper): ... 
+ +FRAME_TYPE_DATA: FrameType.ValueType # 0 +FRAME_TYPE_RST_STREAM: FrameType.ValueType # 3 +FRAME_TYPE_PING: FrameType.ValueType # 6 +FRAME_TYPE_GOAWAY: FrameType.ValueType # 7 +Global___FrameType: typing_extensions.TypeAlias = FrameType + +@typing.final +class StreamRequest(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PAYLOAD_FIELD_NUMBER: builtins.int + FRAME_TYPE_FIELD_NUMBER: builtins.int + payload: builtins.bytes + frame_type: Global___FrameType.ValueType + def __init__( + self, + *, + payload: builtins.bytes = ..., + frame_type: Global___FrameType.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["frame_type", b"frame_type", "payload", b"payload"]) -> None: ... + +Global___StreamRequest: typing_extensions.TypeAlias = StreamRequest + +@typing.final +class StreamResponse(google.protobuf.message.Message): + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + PAYLOAD_FIELD_NUMBER: builtins.int + FRAME_TYPE_FIELD_NUMBER: builtins.int + payload: builtins.bytes + frame_type: Global___FrameType.ValueType + def __init__( + self, + *, + payload: builtins.bytes = ..., + frame_type: Global___FrameType.ValueType = ..., + ) -> None: ... + def ClearField(self, field_name: typing.Literal["frame_type", b"frame_type", "payload", b"payload"]) -> None: ... + +Global___StreamResponse: typing_extensions.TypeAlias = StreamResponse diff --git a/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi new file mode 100644 index 000000000..032778846 --- /dev/null +++ b/packages/jumpstarter-protocol/jumpstarter_protocol/jumpstarter/v1/router_pb2_grpc.pyi @@ -0,0 +1,96 @@ +""" +@generated by mypy-protobuf. Do not edit manually! 
+isort:skip_file +Copyright 2024 The Jumpstarter Authors""" + +import abc +import collections.abc +import grpc +import grpc.aio +import jumpstarter.v1.router_pb2 +import sys +import typing + +if sys.version_info >= (3, 13): + import typing as typing_extensions +else: + import typing_extensions + +_T = typing.TypeVar("_T") + +class _MaybeAsyncIterator(collections.abc.AsyncIterator[_T], collections.abc.Iterator[_T], metaclass=abc.ABCMeta): ... + +class _ServicerContext(grpc.ServicerContext, grpc.aio.ServicerContext): # type: ignore[misc, type-arg] + ... + +GRPC_GENERATED_VERSION: str +GRPC_VERSION: str +_RouterServiceStreamType = typing_extensions.TypeVar( + '_RouterServiceStreamType', + grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + default=grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], +) + +class RouterServiceStub(typing.Generic[_RouterServiceStreamType]): + """StreamService + Claims: + iss: jumpstarter controller + aud: jumpstarter router + sub: jumpstarter client/exporter + stream: stream id + """ + + @typing.overload + def __init__(self: RouterServiceStub[ + grpc.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + ], channel: grpc.Channel) -> None: ... + + @typing.overload + def __init__(self: RouterServiceStub[ + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], + ], channel: grpc.aio.Channel) -> None: ... 
+ + Stream: _RouterServiceStreamType + """Stream connects caller to another caller of the same stream""" + +RouterServiceAsyncStub: typing_extensions.TypeAlias = RouterServiceStub[ + grpc.aio.StreamStreamMultiCallable[ + jumpstarter.v1.router_pb2.StreamRequest, + jumpstarter.v1.router_pb2.StreamResponse, + ], +] + +class RouterServiceServicer(metaclass=abc.ABCMeta): + """StreamService + Claims: + iss: jumpstarter controller + aud: jumpstarter router + sub: jumpstarter client/exporter + stream: stream id + """ + + @abc.abstractmethod + def Stream( + self, + request_iterator: _MaybeAsyncIterator[jumpstarter.v1.router_pb2.StreamRequest], + context: _ServicerContext, + ) -> typing.Union[collections.abc.Iterator[jumpstarter.v1.router_pb2.StreamResponse], collections.abc.AsyncIterator[jumpstarter.v1.router_pb2.StreamResponse]]: + """Stream connects caller to another caller of the same stream""" + +def add_RouterServiceServicer_to_server(servicer: RouterServiceServicer, server: typing.Union[grpc.Server, grpc.aio.Server]) -> None: ... 
diff --git a/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py b/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py index f1697cc2c..073bdb89c 100644 --- a/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py +++ b/packages/jumpstarter-testing/jumpstarter_testing/pytest_test.py @@ -1,6 +1,7 @@ from jumpstarter_driver_power.driver import MockPower from pytest import Pytester +from jumpstarter.common import ExporterStatus from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST from jumpstarter.exporter import Session @@ -18,6 +19,8 @@ def test_simple(self, client): with Session(root_device=MockPower()) as session: with session.serve_unix() as path: + # For local testing, set status to LEASE_READY since there's no lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) monkeypatch.setenv(JUMPSTARTER_HOST, str(path)) monkeypatch.setenv(JMP_DRIVERS_ALLOW, "UNSAFE") result = pytester.runpytest() diff --git a/packages/jumpstarter/jumpstarter/client/core.py b/packages/jumpstarter/jumpstarter/client/core.py index 3befe92eb..85a7267e5 100644 --- a/packages/jumpstarter/jumpstarter/client/core.py +++ b/packages/jumpstarter/jumpstarter/client/core.py @@ -2,6 +2,8 @@ Base classes for drivers and driver clients """ +from __future__ import annotations + import logging from contextlib import asynccontextmanager from dataclasses import dataclass, field @@ -14,7 +16,7 @@ from jumpstarter_protocol import jumpstarter_pb2, jumpstarter_pb2_grpc, router_pb2_grpc from rich.logging import RichHandler -from jumpstarter.common import Metadata +from jumpstarter.common import ExporterStatus, Metadata from jumpstarter.common.exceptions import JumpstarterException from jumpstarter.common.resources import ResourceMetadata from jumpstarter.common.serde import decode_value, encode_value @@ -48,6 +50,12 @@ class DriverInvalidArgument(DriverError, ValueError): """ +class ExporterNotReady(DriverError): + """ + Raised when the exporter is 
not ready to accept driver calls + """ + + @dataclass(kw_only=True) class AsyncDriverClient( Metadata, @@ -76,9 +84,40 @@ def __post_init__(self): handler = RichHandler() self.logger.addHandler(handler) + async def check_exporter_status(self): + """Check if the exporter is ready to accept driver calls. + + Allows driver commands during hook execution (BEFORE_LEASE_HOOK, AFTER_LEASE_HOOK) + in addition to the normal LEASE_READY status. This enables hooks to interact + with drivers via the `j` CLI for automation use cases. + """ + # Statuses that allow driver commands + ALLOWED_STATUSES = { + ExporterStatus.LEASE_READY, + ExporterStatus.BEFORE_LEASE_HOOK, + ExporterStatus.AFTER_LEASE_HOOK, + } + + try: + response = await self.stub.GetStatus(jumpstarter_pb2.GetStatusRequest()) + status = ExporterStatus.from_proto(response.status) + + if status not in ALLOWED_STATUSES: + raise ExporterNotReady(f"Exporter status is {status}: {response.message}") + + except AioRpcError as e: + # If GetStatus is not implemented, assume ready for backward compatibility + if e.code() == StatusCode.UNIMPLEMENTED: + self.logger.debug("GetStatus not implemented, assuming exporter is ready") + return + raise DriverError(f"Failed to check exporter status: {e.details()}") from e + async def call_async(self, method, *args): """Make DriverCall by method name and arguments""" + # Check exporter status before making the call + await self.check_exporter_status() + request = jumpstarter_pb2.DriverCallRequest( uuid=str(self.uuid), method=method, @@ -105,6 +144,9 @@ async def call_async(self, method, *args): async def streamingcall_async(self, method, *args): """Make StreamingDriverCall by method name and arguments""" + # Check exporter status before making the call + await self.check_exporter_status() + request = jumpstarter_pb2.StreamingDriverCallRequest( uuid=str(self.uuid), method=method, diff --git a/packages/jumpstarter/jumpstarter/client/grpc.py b/packages/jumpstarter/jumpstarter/client/grpc.py 
index 445f255c6..a8c352d9a 100644 --- a/packages/jumpstarter/jumpstarter/client/grpc.py +++ b/packages/jumpstarter/jumpstarter/client/grpc.py @@ -5,7 +5,6 @@ from dataclasses import InitVar, dataclass, field from datetime import datetime, timedelta from types import SimpleNamespace -from typing import Any from google.protobuf import duration_pb2, field_mask_pb2, json_format, timestamp_pb2 from grpc import ChannelConnectivity @@ -13,6 +12,7 @@ from jumpstarter_protocol import client_pb2, client_pb2_grpc, jumpstarter_pb2_grpc, kubernetes_pb2, router_pb2_grpc from pydantic import BaseModel, ConfigDict, Field, field_serializer +from jumpstarter.common import ExporterStatus from jumpstarter.common.grpc import translate_grpc_exceptions @@ -20,6 +20,7 @@ class WithOptions: show_online: bool = False show_leases: bool = False + show_status: bool = False def add_display_columns(table, options: WithOptions = None): @@ -28,6 +29,8 @@ def add_display_columns(table, options: WithOptions = None): table.add_column("NAME") if options.show_online: table.add_column("ONLINE") + if options.show_status: + table.add_column("STATUS") table.add_column("LABELS") if options.show_leases: table.add_column("LEASED BY") @@ -42,6 +45,9 @@ def add_exporter_row(table, exporter, options: WithOptions = None, lease_info: t row_data.append(exporter.name) if options.show_online: row_data.append("yes" if exporter.online else "no") + if options.show_status: + status_str = str(exporter.status) if exporter.status else "UNKNOWN" + row_data.append(status_str) row_data.append(",".join(("{}={}".format(k, v) for k, v in sorted(exporter.labels.items())))) if options.show_leases: if lease_info: @@ -81,12 +87,16 @@ class Exporter(BaseModel): name: str labels: dict[str, str] online: bool = False + status: ExporterStatus | None = None lease: Lease | None = None @classmethod def from_protobuf(cls, data: client_pb2.Exporter) -> Exporter: namespace, name = parse_exporter_identifier(data.name) - return 
cls(namespace=namespace, name=name, labels=data.labels, online=data.online) + status = None + if hasattr(data, "status") and data.status: + status = ExporterStatus.from_proto(data.status) + return cls(namespace=namespace, name=name, labels=data.labels, online=data.online, status=status) @classmethod def rich_add_columns(cls, table, options: WithOptions = None): @@ -244,6 +254,7 @@ class ExporterList(BaseModel): next_page_token: str | None = Field(exclude=True) include_online: bool = Field(default=False, exclude=True) include_leases: bool = Field(default=False, exclude=True) + include_status: bool = Field(default=False, exclude=True) @classmethod def from_protobuf(cls, data: client_pb2.ListExportersResponse) -> ExporterList: @@ -253,11 +264,15 @@ def from_protobuf(cls, data: client_pb2.ListExportersResponse) -> ExporterList: ) def rich_add_columns(self, table): - options = WithOptions(show_online=self.include_online, show_leases=self.include_leases) + options = WithOptions( + show_online=self.include_online, show_leases=self.include_leases, show_status=self.include_status + ) Exporter.rich_add_columns(table, options) def rich_add_rows(self, table): - options = WithOptions(show_online=self.include_online, show_leases=self.include_leases) + options = WithOptions( + show_online=self.include_online, show_leases=self.include_leases, show_status=self.include_status + ) for exporter in self.exporters: exporter.rich_add_rows(table, options) @@ -274,6 +289,8 @@ def model_dump_json(self, **kwargs): exclude_fields.add("lease") if not self.include_online: exclude_fields.add("online") + if not self.include_status: + exclude_fields.add("status") data = {"exporters": [exporter.model_dump(mode="json", exclude=exclude_fields) for exporter in self.exporters]} return json.dumps(data, **json_kwargs) @@ -284,6 +301,8 @@ def model_dump(self, **kwargs): exclude_fields.add("lease") if not self.include_online: exclude_fields.add("online") + if not self.include_status: + 
exclude_fields.add("status") return {"exporters": [exporter.model_dump(mode="json", exclude=exclude_fields) for exporter in self.exporters]} @@ -469,7 +488,7 @@ class MultipathExporterStub: channels: InitVar[list[Channel]] - __stubs: dict[Channel, Any] = field(init=False, default_factory=OrderedDict) + __stubs: dict[Channel, SimpleNamespace] = field(init=False, default_factory=OrderedDict) def __post_init__(self, channels): for channel in channels: diff --git a/packages/jumpstarter/jumpstarter/common/__init__.py b/packages/jumpstarter/jumpstarter/common/__init__.py index 13058cb09..8d6ba38bd 100644 --- a/packages/jumpstarter/jumpstarter/common/__init__.py +++ b/packages/jumpstarter/jumpstarter/common/__init__.py @@ -1,4 +1,22 @@ +from .enums import ExporterStatus, LogSource from .metadata import Metadata from .tempfile import TemporarySocket, TemporaryTcpListener, TemporaryUnixListener +from .types import ( + AsyncChannel, + ControllerStub, + ExporterStub, + RouterStub, +) -__all__ = ["Metadata", "TemporarySocket", "TemporaryUnixListener", "TemporaryTcpListener"] +__all__ = [ + "AsyncChannel", + "ControllerStub", + "ExporterStatus", + "ExporterStub", + "LogSource", + "Metadata", + "RouterStub", + "TemporarySocket", + "TemporaryTcpListener", + "TemporaryUnixListener", +] diff --git a/packages/jumpstarter/jumpstarter/common/enums.py b/packages/jumpstarter/jumpstarter/common/enums.py new file mode 100644 index 000000000..ce6a79c2b --- /dev/null +++ b/packages/jumpstarter/jumpstarter/common/enums.py @@ -0,0 +1,76 @@ +"""Human-readable enum wrappers for protobuf-generated constants.""" + +from enum import IntEnum + +from jumpstarter_protocol.jumpstarter.v1 import common_pb2 + + +class ExporterStatus(IntEnum): + """Exporter status states.""" + + UNSPECIFIED = common_pb2.EXPORTER_STATUS_UNSPECIFIED + """Unknown/unspecified exporter status""" + + OFFLINE = common_pb2.EXPORTER_STATUS_OFFLINE + """The exporter is currently offline""" + + AVAILABLE = 
common_pb2.EXPORTER_STATUS_AVAILABLE + """Exporter is available to be leased""" + + BEFORE_LEASE_HOOK = common_pb2.EXPORTER_STATUS_BEFORE_LEASE_HOOK + """Exporter is leased, but currently executing before lease hook""" + + LEASE_READY = common_pb2.EXPORTER_STATUS_LEASE_READY + """Exporter is leased and ready to accept commands""" + + AFTER_LEASE_HOOK = common_pb2.EXPORTER_STATUS_AFTER_LEASE_HOOK + """Lease was released, but exporter is executing after lease hook""" + + BEFORE_LEASE_HOOK_FAILED = common_pb2.EXPORTER_STATUS_BEFORE_LEASE_HOOK_FAILED + """The before lease hook failed and the exporter is no longer available""" + + AFTER_LEASE_HOOK_FAILED = common_pb2.EXPORTER_STATUS_AFTER_LEASE_HOOK_FAILED + """The after lease hook failed and the exporter is no longer available""" + + def __str__(self): + return self.name + + @classmethod + def from_proto(cls, value: int) -> "ExporterStatus": + """Convert from protobuf integer to enum.""" + return cls(value) + + def to_proto(self) -> int: + """Convert to protobuf integer.""" + return self.value + + +class LogSource(IntEnum): + """Log source types.""" + + UNSPECIFIED = common_pb2.LOG_SOURCE_UNSPECIFIED + """Unspecified/unknown log source""" + + DRIVER = common_pb2.LOG_SOURCE_DRIVER + """Logs produced by a Jumpstarter driver""" + + BEFORE_LEASE_HOOK = common_pb2.LOG_SOURCE_BEFORE_LEASE_HOOK + """Logs produced by a before lease hook""" + + AFTER_LEASE_HOOK = common_pb2.LOG_SOURCE_AFTER_LEASE_HOOK + """Logs produced by an after lease hook""" + + SYSTEM = common_pb2.LOG_SOURCE_SYSTEM + """System/exporter logs""" + + def __str__(self): + return self.name + + @classmethod + def from_proto(cls, value: int) -> "LogSource": + """Convert from protobuf integer to enum.""" + return cls(value) + + def to_proto(self) -> int: + """Convert to protobuf integer.""" + return self.value diff --git a/packages/jumpstarter/jumpstarter/common/types.py b/packages/jumpstarter/jumpstarter/common/types.py new file mode 100644 index
000000000..fb1104920 --- /dev/null +++ b/packages/jumpstarter/jumpstarter/common/types.py @@ -0,0 +1,25 @@ +"""Type aliases for gRPC and Protobuf types.""" + +from typing import TYPE_CHECKING, TypeAlias + +from grpc.aio import Channel +from jumpstarter_protocol import jumpstarter_pb2_grpc, router_pb2_grpc + +# Stub type aliases (the generic Stub classes work for both sync and async) +ExporterStub: TypeAlias = jumpstarter_pb2_grpc.ExporterServiceStub +RouterStub: TypeAlias = router_pb2_grpc.RouterServiceStub +ControllerStub: TypeAlias = jumpstarter_pb2_grpc.ControllerServiceStub + +# Channel type alias +AsyncChannel: TypeAlias = Channel + +# Async stub type aliases are only available for type checking (defined in .pyi files) +if TYPE_CHECKING: + pass + +__all__ = [ + "AsyncChannel", + "ControllerStub", + "ExporterStub", + "RouterStub", +] diff --git a/packages/jumpstarter/jumpstarter/common/utils.py b/packages/jumpstarter/jumpstarter/common/utils.py index dac73cad0..c23a8e46b 100644 --- a/packages/jumpstarter/jumpstarter/common/utils.py +++ b/packages/jumpstarter/jumpstarter/common/utils.py @@ -5,22 +5,29 @@ from datetime import timedelta from functools import partial from subprocess import Popen +from typing import TYPE_CHECKING from anyio.from_thread import BlockingPortal, start_blocking_portal from jumpstarter.client import client_from_path from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST -from jumpstarter.driver import Driver from jumpstarter.exporter import Session from jumpstarter.utils.env import env +if TYPE_CHECKING: + from jumpstarter.driver import Driver + __all__ = ["env"] @asynccontextmanager -async def serve_async(root_device: Driver, portal: BlockingPortal, stack: ExitStack): +async def serve_async(root_device: "Driver", portal: BlockingPortal, stack: ExitStack): + from jumpstarter.common import ExporterStatus + with Session(root_device=root_device) as session: async with session.serve_unix_async() as path: + # For local testing, 
set status to LEASE_READY since there's no lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) # SAFETY: the root_device instance is constructed locally thus considered trusted async with client_from_path(path, portal, stack, allow=[], unsafe=True) as client: try: @@ -31,7 +38,7 @@ async def serve_async(root_device: Driver, portal: BlockingPortal, stack: ExitSt @contextmanager -def serve(root_device: Driver): +def serve(root_device: "Driver"): with start_blocking_portal() as portal: with ExitStack() as stack: with portal.wrap_async_context_manager(serve_async(root_device, portal, stack)) as client: diff --git a/packages/jumpstarter/jumpstarter/config/client.py b/packages/jumpstarter/jumpstarter/config/client.py index 97f92c1ec..c2f7ac196 100644 --- a/packages/jumpstarter/jumpstarter/config/client.py +++ b/packages/jumpstarter/jumpstarter/config/client.py @@ -120,7 +120,7 @@ class ClientConfigV1Alpha1(BaseSettings): leases: ClientConfigV1Alpha1Lease = Field(default_factory=ClientConfigV1Alpha1Lease) - async def channel(self): + async def channel(self) -> grpc.aio.Channel: if self.endpoint is None or self.token is None: raise ConfigurationError("endpoint or token not set in client config") @@ -160,12 +160,14 @@ async def list_exporters( filter: str | None = None, include_leases: bool = False, include_online: bool = False, + include_status: bool = False, ): svc = ClientService(channel=await self.channel(), namespace=self.metadata.namespace) exporters_response = await svc.ListExporters(page_size=page_size, page_token=page_token, filter=filter) - # Set the include_online flag for display purposes + # Set the include flags for display purposes exporters_response.include_online = include_online + exporters_response.include_status = include_status if not include_leases: return exporters_response diff --git a/packages/jumpstarter/jumpstarter/config/exporter.py b/packages/jumpstarter/jumpstarter/config/exporter.py index efd4724b6..e70b00d57 100644 --- 
a/packages/jumpstarter/jumpstarter/config/exporter.py +++ b/packages/jumpstarter/jumpstarter/config/exporter.py @@ -2,7 +2,7 @@ from contextlib import asynccontextmanager, contextmanager, suppress from pathlib import Path -from typing import Any, ClassVar, Literal, Optional, Self +from typing import TYPE_CHECKING, Any, ClassVar, Literal, Optional, Self import grpc import yaml @@ -15,7 +15,39 @@ from jumpstarter.common.exceptions import ConfigurationError from jumpstarter.common.grpc import aio_secure_channel, ssl_channel_credentials from jumpstarter.common.importlib import import_class -from jumpstarter.driver import Driver + +if TYPE_CHECKING: + from jumpstarter.driver import Driver + + +class HookInstanceConfigV1Alpha1(BaseModel): + """Configuration for a specific lifecycle hook.""" + + model_config = ConfigDict(populate_by_name=True) + + script: str = Field(alias="script", description="The j script to execute for this hook") + timeout: int = Field(default=120, description="The hook execution timeout in seconds (default: 120s)") + on_failure: Literal[ + "warn", + "endLease", + "exit", + ] = Field( + default="warn", + alias="onFailure", + description=( + "Action to take when the expected exit code is not returned: 'endLease' to end the lease, " + "'exit' takes the exporter offline and ends the lease, 'warn' continues and prints a warning" + ), + ) + + +class HookConfigV1Alpha1(BaseModel): + """Configuration for lifecycle hooks.""" + + model_config = ConfigDict(populate_by_name=True) + + before_lease: HookInstanceConfigV1Alpha1 | None = Field(default=None, alias="beforeLease") + after_lease: HookInstanceConfigV1Alpha1 | None = Field(default=None, alias="afterLease") class ExporterConfigV1Alpha1DriverInstanceProxy(BaseModel): @@ -41,7 +73,7 @@ class ExporterConfigV1Alpha1DriverInstance(RootModel): | ExporterConfigV1Alpha1DriverInstanceProxy ) - def instantiate(self) -> Driver: + def instantiate(self) -> "Driver": match self.root: case 
ExporterConfigV1Alpha1DriverInstanceBase(): driver_class = import_class(self.root.type, [], True) @@ -52,7 +84,7 @@ def instantiate(self) -> Driver: description=self.root.description, methods_description=self.root.methods_description, children=children, - **self.root.config + **self.root.config, ) case ExporterConfigV1Alpha1DriverInstanceComposite(): @@ -93,6 +125,7 @@ class ExporterConfigV1Alpha1(BaseModel): description: str | None = None export: dict[str, ExporterConfigV1Alpha1DriverInstance] = Field(default_factory=dict) + hooks: HookConfigV1Alpha1 = Field(default_factory=HookConfigV1Alpha1) path: Path | None = Field(default=None) @@ -127,7 +160,7 @@ def list(cls) -> ExporterConfigListV1Alpha1: @classmethod def dump_yaml(self, config: Self) -> str: - return yaml.safe_dump(config.model_dump(mode="json", exclude={"alias", "path"}), sort_keys=False) + return yaml.safe_dump(config.model_dump(mode="json", by_alias=True, exclude={"alias", "path"}), sort_keys=False) @classmethod def save(cls, config: Self, path: Optional[str] = None) -> Path: @@ -138,7 +171,7 @@ def save(cls, config: Self, path: Optional[str] = None) -> Path: else: config.path = Path(path) with config.path.open(mode="w") as f: - yaml.safe_dump(config.model_dump(mode="json", exclude={"alias", "path"}), f, sort_keys=False) + yaml.safe_dump(config.model_dump(mode="json", by_alias=True, exclude={"alias", "path"}), f, sort_keys=False) return config.path @classmethod @@ -150,6 +183,7 @@ def delete(cls, alias: str) -> Path: @asynccontextmanager async def serve_unix_async(self): # dynamic import to avoid circular imports + from jumpstarter.common import ExporterStatus from jumpstarter.exporter import Session with Session( @@ -160,6 +194,8 @@ async def serve_unix_async(self): ).instantiate(), ) as session: async with session.serve_unix_async() as path: + # For local usage, set status to LEASE_READY since there's no lease/hook flow + session.update_status(ExporterStatus.LEASE_READY) yield path @contextmanager @@ 
-176,7 +212,7 @@ async def create_exporter(self): from jumpstarter.exporter import Exporter - async def channel_factory(): + async def channel_factory() -> grpc.aio.Channel: if self.endpoint is None or self.token is None: raise ConfigurationError("endpoint or token not set in exporter config") credentials = grpc.composite_channel_credentials( @@ -185,6 +221,16 @@ async def channel_factory(): ) return aio_secure_channel(self.endpoint, credentials, self.grpcOptions) + # Create hook executor if hooks are configured + hook_executor = None + if self.hooks.before_lease or self.hooks.after_lease: + from jumpstarter.exporter.hooks import HookExecutor + + hook_executor = HookExecutor( + config=self.hooks, + device_factory=ExporterConfigV1Alpha1DriverInstance(children=self.export).instantiate, + ) + exporter = None entered = False try: @@ -197,6 +243,7 @@ async def channel_factory(): ).instantiate, tls=self.tls, grpc_options=self.grpcOptions, + hook_executor=hook_executor, ) # Initialize the exporter (registration, etc.) 
await exporter.__aenter__() diff --git a/packages/jumpstarter/jumpstarter/config/exporter_test.py b/packages/jumpstarter/jumpstarter/config/exporter_test.py index e9fb48630..68d0e3f42 100644 --- a/packages/jumpstarter/jumpstarter/config/exporter_test.py +++ b/packages/jumpstarter/jumpstarter/config/exporter_test.py @@ -101,3 +101,56 @@ def test_exporter_config(monkeypatch: pytest.MonkeyPatch, tmp_path: Path): ExporterConfigV1Alpha1.save(config) assert config == ExporterConfigV1Alpha1.load("test") + + +def test_exporter_config_with_hooks(monkeypatch: pytest.MonkeyPatch, tmp_path: Path): + monkeypatch.setattr(ExporterConfigV1Alpha1, "BASE_PATH", tmp_path) + + path = tmp_path / "test-hooks.yaml" + + text = """apiVersion: jumpstarter.dev/v1alpha1 +kind: ExporterConfig +metadata: + namespace: default + name: test-hooks +endpoint: "jumpstarter.my-lab.com:1443" +token: "test-token" +hooks: + beforeLease: + script: | + echo "Pre-lease hook for $LEASE_NAME" + j power on + timeout: 600 + afterLease: + script: | + echo "Post-lease hook for $LEASE_NAME" + j power off + timeout: 600 +export: + power: + type: "jumpstarter_driver_power.driver.PduPower" +""" + path.write_text( + text, + encoding="utf-8", + ) + + config = ExporterConfigV1Alpha1.load("test-hooks") + + assert config.hooks.before_lease.script == 'echo "Pre-lease hook for $LEASE_NAME"\nj power on\n' + assert config.hooks.after_lease.script == 'echo "Post-lease hook for $LEASE_NAME"\nj power off\n' + + # Test that it round-trips correctly + path.unlink() + ExporterConfigV1Alpha1.save(config) + reloaded_config = ExporterConfigV1Alpha1.load("test-hooks") + + assert reloaded_config.hooks.before_lease.script == config.hooks.before_lease.script + assert reloaded_config.hooks.after_lease.script == config.hooks.after_lease.script + + # Test that the YAML uses camelCase + yaml_output = ExporterConfigV1Alpha1.dump_yaml(config) + assert "beforeLease:" in yaml_output + assert "afterLease:" in yaml_output + assert "before_lease:" 
not in yaml_output + assert "after_lease:" not in yaml_output diff --git a/packages/jumpstarter/jumpstarter/driver/base.py b/packages/jumpstarter/jumpstarter/driver/base.py index 8c67264c8..fa798d2e8 100644 --- a/packages/jumpstarter/jumpstarter/driver/base.py +++ b/packages/jumpstarter/jumpstarter/driver/base.py @@ -27,7 +27,7 @@ MARKER_STREAMCALL, MARKER_STREAMING_DRIVERCALL, ) -from jumpstarter.common import Metadata +from jumpstarter.common import LogSource, Metadata from jumpstarter.common.resources import ClientStreamResource, PresignedRequestResource, Resource, ResourceMetadata from jumpstarter.common.serde import decode_value, encode_value from jumpstarter.common.streams import ( @@ -35,6 +35,7 @@ ResourceStreamRequest, ) from jumpstarter.config.env import JMP_DISABLE_COMPRESSION +from jumpstarter.exporter.logging import get_logger from jumpstarter.streams.aiohttp import AiohttpStreamReaderStream from jumpstarter.streams.common import create_memory_stream from jumpstarter.streams.encoding import Compression, compress_stream @@ -86,7 +87,7 @@ def __post_init__(self): if hasattr(super(), "__post_init__"): super().__post_init__() - self.logger = logging.getLogger(self.__class__.__name__) + self.logger = get_logger(f"driver.{self.__class__.__name__}", LogSource.DRIVER) self.logger.setLevel(self.log_level) def close(self): diff --git a/packages/jumpstarter/jumpstarter/exporter/exporter.py b/packages/jumpstarter/jumpstarter/exporter/exporter.py index a33a6a9be..15e8c37d0 100644 --- a/packages/jumpstarter/jumpstarter/exporter/exporter.py +++ b/packages/jumpstarter/jumpstarter/exporter/exporter.py @@ -2,12 +2,13 @@ from collections.abc import AsyncGenerator, Awaitable, Callable from contextlib import asynccontextmanager from dataclasses import dataclass, field -from typing import Self +from typing import TYPE_CHECKING, Any, Self import grpc from anyio import ( AsyncContextManagerMixin, CancelScope, + Event, connect_unix, create_memory_object_stream, 
create_task_group, @@ -21,75 +22,354 @@ jumpstarter_pb2_grpc, ) -from jumpstarter.common import Metadata +from jumpstarter.common import ExporterStatus, Metadata from jumpstarter.common.streams import connect_router_stream from jumpstarter.config.tls import TLSConfigV1Alpha1 -from jumpstarter.driver import Driver +from jumpstarter.exporter.hooks import HookExecutor +from jumpstarter.exporter.lease_context import LeaseContext from jumpstarter.exporter.session import Session +if TYPE_CHECKING: + from jumpstarter.driver import Driver + logger = logging.getLogger(__name__) @dataclass(kw_only=True) class Exporter(AsyncContextManagerMixin, Metadata): + """Represents a Jumpstarter Exporter runtime instance. + + Inherits from Metadata, which provides: + uuid: Unique identifier for the exporter instance (UUID4) + labels: Key-value labels for exporter identification and selector matching + """ + + # Public Configuration Fields + channel_factory: Callable[[], Awaitable[grpc.aio.Channel]] - device_factory: Callable[[], Driver] - lease_name: str = field(init=False, default="") + """Factory function for creating gRPC channels to communicate with the controller. + + Called multiple times throughout the exporter lifecycle to establish connections. + The factory should handle authentication, credentials, and channel configuration. + Used when creating controller stubs, unregistering, and establishing streams. + """ + + device_factory: Callable[[], "Driver"] + """Factory function for creating Driver instances representing the hardware/devices. + + Called when creating Sessions to provide access to the underlying device. + The Driver can contain child drivers in a composite pattern, representing + the full device tree being exported. Typically created from ExporterConfigV1Alpha1. + """ + tls: TLSConfigV1Alpha1 = field(default_factory=TLSConfigV1Alpha1) + """TLS/SSL configuration for secure communication with router and controller. 
+ + Contains certificate authority (ca) and insecure flag for certificate verification. + Passed to connect_router_stream() when handling client connections. + Default creates empty config with ca="" and insecure=False. + """ + grpc_options: dict[str, str] = field(default_factory=dict) - registered: bool = field(init=False, default=False) + """Custom gRPC channel options that override or supplement default settings. + + Merged with defaults (round_robin load balancing, keepalive settings, etc.). + Configured via YAML as grpcOptions in exporter config. + Passed to connect_router_stream() for client connections. + """ + + hook_executor: HookExecutor | None = field(default=None) + """Optional executor for lifecycle hooks (before-lease and after-lease). + + When configured, runs custom scripts at key points in the lease lifecycle: + - before-lease: Runs when transitioning to leased state (setup, validation) + - after-lease: Runs when transitioning from leased state (cleanup, reset) + Created when hooks.before_lease or hooks.after_lease are defined in config. + """ + + # Internal State Fields + + _registered: bool = field(init=False, default=False) + """Tracks whether exporter has successfully registered with the controller. + + Set to True after successful registration. Used to determine if unregistration + is needed during cleanup. + """ + _unregister: bool = field(init=False, default=False) + """Internal flag indicating whether to actively unregister during shutdown. + + Set when stop(should_unregister=True) is called. When False, relies on + heartbeat timeout for implicit unregistration. + """ + _stop_requested: bool = field(init=False, default=False) + """Internal flag indicating a graceful stop has been requested. + + Set to True when stop(wait_for_lease_exit=True) is called. The exporter + waits for the current lease to exit before stopping. 
+ """ + _started: bool = field(init=False, default=False) + """Internal flag tracking whether the exporter has started serving. + + Set to True when the first lease is assigned. Used to determine immediate + vs graceful stop behavior. + """ + _tg: TaskGroup | None = field(init=False, default=None) + """Reference to the anyio TaskGroup managing concurrent tasks. + + Manages streams and connection handling tasks. Used to cancel all tasks + when stopping. Set during serve() and cleared when done. + """ + + _exporter_status: ExporterStatus = field(init=False, default=ExporterStatus.OFFLINE) + """Current status of the exporter. + + Updated via _update_status() and reported to controller and session. + Possible values: OFFLINE, AVAILABLE, BEFORE_LEASE_HOOK, LEASE_READY, + AFTER_LEASE_HOOK, BEFORE_LEASE_HOOK_FAILED, AFTER_LEASE_HOOK_FAILED. + """ + + _previous_leased: bool = field(init=False, default=False) + """Previous lease state used to detect lease state transitions. + + Tracks whether the exporter was leased in the previous status check to + determine when to trigger before-lease and after-lease hooks. + """ - def stop(self, wait_for_lease_exit=False, should_unregister=False): + _exit_code: int | None = field(init=False, default=None) + """Exit code to use when the exporter shuts down. + + When set to a non-zero value, the exporter should terminate permanently + (not restart). This is used by hooks with on_failure='exit' to signal + that the exporter should shut down and not be restarted by the CLI. + """ + + _lease_context: LeaseContext | None = field(init=False, default=None) + """Encapsulates all resources associated with the current lease. + + Contains the session, socket path, and synchronization event needed + throughout the lease lifecycle. This replaces the previous individual + _current_session, _session_socket_path, and _before_lease_hook fields. + + Lifecycle: + 1. Created in serve() when a lease is assigned (session/socket initially None) + 2. 
Populated in handle_lease() when the session is created + 3. Accessed by hook execution methods and status reporting + 4. Cleared when lease ends or changes + + The session and socket are managed by the context manager in handle_lease(), + ensuring proper cleanup when the lease ends. The LeaseScope itself is just + a reference holder and doesn't manage resource lifecycles directly. + """ + + def stop(self, wait_for_lease_exit=False, should_unregister=False, exit_code: int | None = None): """Signal the exporter to stop. Args: wait_for_lease_exit (bool): If True, wait for the current lease to exit before stopping. should_unregister (bool): If True, unregister from controller. Otherwise rely on heartbeat. + exit_code (int | None): If set, the exporter will exit with this code (non-zero means no restart). """ + # Set exit code if provided + if exit_code is not None: + self._exit_code = exit_code # Stop immediately if not started yet or if immediate stop is requested if (not self._started or not wait_for_lease_exit) and self._tg is not None: - logger.info("Stopping exporter immediately, unregister from controller=%s", should_unregister) + if should_unregister: + logger.info("Stopping exporter immediately, unregistering from controller") + else: + logger.info("Stopping exporter immediately, will not unregister from controller") self._unregister = should_unregister + # Cancel any ongoing tasks self._tg.cancel_scope.cancel() elif not self._stop_requested: self._stop_requested = True logger.info("Exporter marked for stop upon lease exit") + @property + def exit_code(self) -> int | None: + """Get the exit code for the exporter. + + Returns: + The exit code if set, or None if the exporter should restart. 
+ """ + return self._exit_code + + async def _get_controller_stub(self) -> jumpstarter_pb2_grpc.ControllerServiceStub: + """Create and return a controller service stub.""" + return jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) + + async def _retry_stream( + self, + stream_name: str, + stream_factory: Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator], + send_tx, + retries: int = 5, + backoff: float = 3.0, + ): + """Generic retry wrapper for gRPC streaming calls. + + Args: + stream_name: Name of the stream for logging purposes + stream_factory: Function that takes a controller stub and returns an async generator + send_tx: Transmission channel to send stream items to + retries: Maximum number of retry attempts + backoff: Seconds to wait between retries + """ + retries_left = retries + while True: + try: + controller = await self._get_controller_stub() + async for item in stream_factory(controller): + await send_tx.send(item) + except Exception as e: + if retries_left > 0: + retries_left -= 1 + logger.info( + "%s stream interrupted, restarting in %ss, %s retries left: %s", + stream_name, + backoff, + retries_left, + e, + ) + await sleep(backoff) + else: + raise + else: + retries_left = retries + + def _listen_stream_factory( + self, lease_name: str + ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.ListenResponse, None]]: + """Create a stream factory for listening to connection requests.""" + + def factory( + ctrl: jumpstarter_pb2_grpc.ControllerServiceStub, + ) -> AsyncGenerator[jumpstarter_pb2.ListenResponse, None]: + return ctrl.Listen(jumpstarter_pb2.ListenRequest(lease_name=lease_name)) + + return factory + + def _status_stream_factory( + self, + ) -> Callable[[jumpstarter_pb2_grpc.ControllerServiceStub], AsyncGenerator[jumpstarter_pb2.StatusResponse, None]]: + """Create a stream factory for status updates.""" + + def factory( + ctrl: 
jumpstarter_pb2_grpc.ControllerServiceStub, + ) -> AsyncGenerator[jumpstarter_pb2.StatusResponse, None]: + return ctrl.Status(jumpstarter_pb2.StatusRequest()) + + return factory + + async def _register_with_controller(self, local_channel: grpc.aio.Channel): + """Register the exporter with the controller. + + Args: + local_channel: The local Unix socket channel to get device reports from + """ + # Get device reports from the local session + exporter_stub = jumpstarter_pb2_grpc.ExporterServiceStub(local_channel) + response: jumpstarter_pb2.GetReportResponse = await exporter_stub.GetReport(empty_pb2.Empty()) + + # Register with the REMOTE controller (not the local session) + logger.info("Registering exporter with controller") + controller = await self._get_controller_stub() + await controller.Register( + jumpstarter_pb2.RegisterRequest( + labels=self.labels, + reports=response.reports, + ) + ) + # Mark exporter as registered internally + self._registered = True + # Only report AVAILABLE status during initial registration (no lease context) + # During per-lease registration, status is managed by serve() to avoid + # overwriting LEASE_READY with AVAILABLE + if self._lease_context is None: + await self._report_status(ExporterStatus.AVAILABLE, "Exporter registered and available") + + async def _report_status(self, status: ExporterStatus, message: str = ""): + """Report the exporter status with the controller and session.""" + self._exporter_status = status + + # Update status in lease context (handles session update internally) + # This ensures status is stored even before session is created + if self._lease_context: + self._lease_context.update_status(status, message) + + try: + controller = await self._get_controller_stub() + await controller.ReportStatus( + jumpstarter_pb2.ReportStatusRequest( + status=status.to_proto(), + message=message, + ) + ) + logger.info(f"Updated status to {status}: {message}") + except Exception as e: + logger.error(f"Failed to update status: 
{e}") + + async def _unregister_with_controller(self): + """Safely unregister from controller with timeout and error handling.""" + if not (self._registered and self._unregister): + return + + logger.info("Unregistering exporter with controller") + try: + with move_on_after(10): # 10 second timeout + channel = await self.channel_factory() + try: + controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) + await self._report_status(ExporterStatus.OFFLINE, "Exporter shutting down") + await controller.Unregister( + jumpstarter_pb2.UnregisterRequest( + reason="Exporter shutdown", + ) + ) + logger.info("Controller unregistration completed successfully") + finally: + with CancelScope(shield=True): + await channel.close() + except Exception as e: + logger.error("Error during controller unregistration: %s", e, exc_info=True) + @asynccontextmanager async def __asynccontextmanager__(self) -> AsyncGenerator[Self]: try: yield self finally: try: - if self.registered and self._unregister: - logger.info("Unregistering exporter with controller") - try: - with move_on_after(10): # 10 second timeout - channel = await self.channel_factory() - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(channel) - await controller.Unregister( - jumpstarter_pb2.UnregisterRequest( - reason="Exporter shutdown", - ) - ) - logger.info("Controller unregistration completed successfully") - finally: - with CancelScope(shield=True): - await channel.close() - except Exception as e: - logger.error("Error during controller unregistration: %s", e, exc_info=True) - + await self._unregister_with_controller() except Exception as e: logger.error("Error during exporter cleanup: %s", e, exc_info=True) # Don't re-raise to avoid masking the original exception - async def __handle(self, path, endpoint, token, tls_config, grpc_options): + async def _handle_client_conn( + self, path: str, endpoint: str, token: str, tls_config: TLSConfigV1Alpha1, grpc_options: dict[str, Any] | None + ) -> None: + 
"""Handle a single client connection by proxying between session and router. + + This method establishes a connection from the local session Unix socket to the + router endpoint, creating a bidirectional proxy that allows the client to + communicate with the device through the router infrastructure. + + Args: + path: Unix socket path where the session is serving + endpoint: Router endpoint URL to connect to + token: Authentication token for the router connection + tls_config: TLS configuration for secure router communication + grpc_options: Optional gRPC channel options for the router connection + + Note: + This is a private method spawned as a concurrent task by handle_lease_conn() + for each incoming connection request. It runs until the client disconnects + or an error occurs. + """ try: async with await connect_unix(path) as stream: async with connect_router_stream(endpoint, token, stream, tls_config, grpc_options): @@ -99,60 +379,89 @@ async def __handle(self, path, endpoint, token, tls_config, grpc_options): @asynccontextmanager async def session(self): - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) + """Create and manage an exporter Session context. + + Yields: + tuple[Session, str]: A tuple of (session, socket_path) for use in lease handling. 
+ """ with Session( uuid=self.uuid, labels=self.labels, root_device=self.device_factory(), ) as session: + # Create a Unix socket async with session.serve_unix_async() as path: + # Create a gRPC channel to the controller via the socket async with grpc.aio.secure_channel( f"unix://{path}", grpc.local_channel_credentials(grpc.LocalConnectionType.UDS) ) as channel: - response = await jumpstarter_pb2_grpc.ExporterServiceStub(channel).GetReport(empty_pb2.Empty()) - logger.info("Registering exporter with controller") - await controller.Register( - jumpstarter_pb2.RegisterRequest( - labels=self.labels, - reports=response.reports, - ) - ) - self.registered = True - yield path + # Register the exporter with the controller + await self._register_with_controller(channel) + # Yield both session and path for creating LeaseScope + yield session, path + + async def handle_lease(self, lease_name: str, tg: TaskGroup, lease_scope: LeaseContext) -> None: + """Handle all incoming client connections for a lease. - async def handle(self, lease_name, tg): + This method orchestrates the complete lifecycle of managing connections during + a lease period. It listens for connection requests and spawns individual + tasks to handle each client connection. + + The method performs the following steps: + 1. Creates a session for the lease duration + 2. Populates the lease_scope with session and socket path + 3. Sets up a stream to listen for incoming connection requests + 4. Waits for the before-lease hook to complete (if configured) + 5. Spawns a new task for each incoming connection request + + Args: + lease_name: Name of the lease to handle connections for + tg: TaskGroup for spawning concurrent connection handler tasks + lease_scope: LeaseScope with before_lease_hook event (session/socket set here) + + Note: + This method runs for the entire duration of the lease and is spawned by + the serve() method when a lease is assigned. It terminates when the lease + ends or the exporter stops. 
+ """ logger.info("Listening for incoming connection requests on lease %s", lease_name) - listen_tx, listen_rx = create_memory_object_stream() + listen_tx, listen_rx = create_memory_object_stream[jumpstarter_pb2.ListenResponse]() - async def listen(retries=5, backoff=3): - retries_left = retries - while True: - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) - async for request in controller.Listen(jumpstarter_pb2.ListenRequest(lease_name=lease_name)): - await listen_tx.send(request) - except Exception as e: - if retries_left > 0: - retries_left -= 1 - logger.info( - "Listen stream interrupted, restarting in {}s, {} retries left: {}".format( - backoff, retries_left, e - ) - ) - await sleep(backoff) - else: - raise - else: - retries_left = retries + # Start listening for connection requests with retry logic + tg.start_soon( + self._retry_stream, + "Listen", + self._listen_stream_factory(lease_name), + listen_tx, + ) + + # Create session for the lease duration and populate lease_scope + async with self.session() as (session, path): + # Populate the lease scope with session and socket path + lease_scope.session = session + lease_scope.socket_path = path - tg.start_soon(listen) + # Wait for before-lease hook to complete before processing client connections + logger.info("Waiting for before-lease hook to complete before accepting connections") + await lease_scope.before_lease_hook.wait() + logger.info("Before-lease hook completed, now accepting connections") - async with self.session() as path: + # Sync status to session AFTER hook completes - this ensures we have LEASE_READY + # status from serve() rather than the default AVAILABLE + session.update_status(lease_scope.current_status, lease_scope.status_message) + + # Process client connections + # Type: request is jumpstarter_pb2.ListenResponse with router_endpoint and router_token fields async for request in listen_rx: logger.info("Handling new connection request on lease 
%s", lease_name) tg.start_soon( - self.__handle, path, request.router_endpoint, request.router_token, self.tls, self.grpc_options + self._handle_client_conn, + lease_scope.socket_path, + request.router_endpoint, + request.router_token, + self.tls, + self.grpc_options, ) async def serve(self): # noqa: C901 @@ -162,47 +471,99 @@ async def serve(self): # noqa: C901 # initial registration async with self.session(): pass - status_tx, status_rx = create_memory_object_stream() - - async def status(retries=5, backoff=3): - retries_left = retries - while True: - try: - controller = jumpstarter_pb2_grpc.ControllerServiceStub(await self.channel_factory()) - async for status in controller.Status(jumpstarter_pb2.StatusRequest()): - await status_tx.send(status) - except Exception as e: - if retries_left > 0: - retries_left -= 1 - logger.info( - "Status stream interrupted, restarting in {}s, {} retries left: {}".format( - backoff, retries_left, e - ) - ) - await sleep(backoff) - else: - raise - else: - retries_left = retries + status_tx, status_rx = create_memory_object_stream[jumpstarter_pb2.StatusResponse]() async with create_task_group() as tg: self._tg = tg - tg.start_soon(status) + # Start status stream with retry logic + tg.start_soon( + self._retry_stream, + "Status", + self._status_stream_factory(), + status_tx, + ) async for status in status_rx: - if self.lease_name != "" and self.lease_name != status.lease_name: - self.lease_name = status.lease_name + # Check if lease name changed (and there was a previous active lease) + lease_changed = ( + self._lease_context + and self._lease_context.is_active() + and self._lease_context.lease_name != status.lease_name + ) + if lease_changed: + # After-lease hook for the previous lease (lease name changed) + if self.hook_executor and self._lease_context.has_client(): + with CancelScope(shield=True): + await self.hook_executor.run_after_lease_hook( + self._lease_context, + self._report_status, + self.stop, + ) + logger.info("Lease 
status changed, killing existing connections") + # Clear lease scope for next lease + self._lease_context = None self.stop() break - self.lease_name = status.lease_name - if not self._started and self.lease_name != "": + + # Check for lease state transitions + previous_leased = self._previous_leased + current_leased = status.leased + + # Check if this is a new lease assignment (first time or lease name changed) + if not self._started and status.lease_name != "": self._started = True - tg.start_soon(self.handle, self.lease_name, tg) - if status.leased: + # Create lease scope and start handling the lease + # The session will be created inside handle_lease and stay open for the lease duration + lease_scope = LeaseContext( + lease_name=status.lease_name, + before_lease_hook=Event(), + ) + self._lease_context = lease_scope + tg.start_soon(self.handle_lease, status.lease_name, tg, lease_scope) + + if current_leased: logger.info("Currently leased by %s under %s", status.client_name, status.lease_name) + if self._lease_context: + self._lease_context.update_client(status.client_name) + + # Before-lease hook when transitioning from unleased to leased + if not previous_leased: + if self.hook_executor and self._lease_context: + tg.start_soon( + self.hook_executor.run_before_lease_hook, + self._lease_context, + self._report_status, + self.stop, # Pass shutdown callback + ) + else: + # No hook configured, set event immediately + await self._report_status(ExporterStatus.LEASE_READY, "Ready for commands") + if self._lease_context: + self._lease_context.before_lease_hook.set() else: logger.info("Currently not leased") + + # After-lease hook when transitioning from leased to unleased + if ( + previous_leased + and self.hook_executor + and self._lease_context + and self._lease_context.has_client() + ): + # Shield the after-lease hook from cancellation + with CancelScope(shield=True): + await self.hook_executor.run_after_lease_hook( + self._lease_context, + self._report_status, + 
self.stop, + ) + + # Clear lease scope for next lease + self._lease_context = None + if self._stop_requested: self.stop(should_unregister=True) break + + self._previous_leased = current_leased self._tg = None diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks.py b/packages/jumpstarter/jumpstarter/exporter/hooks.py new file mode 100644 index 000000000..d3ec0e36c --- /dev/null +++ b/packages/jumpstarter/jumpstarter/exporter/hooks.py @@ -0,0 +1,434 @@ +"""Lifecycle hooks for Jumpstarter exporters.""" + +import logging +import os +import subprocess +from collections.abc import Awaitable +from dataclasses import dataclass +from typing import TYPE_CHECKING, Callable, Literal + +import anyio +from anyio import open_process + +from jumpstarter.common import ExporterStatus, LogSource +from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST +from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 +from jumpstarter.exporter.session import Session + +if TYPE_CHECKING: + from jumpstarter.driver import Driver + from jumpstarter.exporter.lease_context import LeaseContext + +logger = logging.getLogger(__name__) + + +@dataclass +class HookExecutionError(Exception): + """Raised when a hook fails and on_failure is set to 'endLease' or 'exit'. 
+ + Attributes: + message: Error message describing the failure + on_failure: The on_failure mode that triggered this error ('endLease' or 'exit') + hook_type: The type of hook that failed ('before_lease' or 'after_lease') + """ + + message: str + on_failure: Literal["endLease", "exit"] + hook_type: Literal["before_lease", "after_lease"] + + def __str__(self) -> str: + return self.message + + def should_shutdown_exporter(self) -> bool: + """Returns True if the exporter should be shut down entirely.""" + return self.on_failure == "exit" + + def should_end_lease(self) -> bool: + """Returns True if the lease should be ended.""" + return self.on_failure in ("endLease", "exit") + + +@dataclass(kw_only=True) +class HookExecutor: + """Executes lifecycle hooks with access to the j CLI.""" + + config: HookConfigV1Alpha1 + device_factory: Callable[[], "Driver"] + + def _create_hook_env(self, lease_scope: "LeaseContext") -> dict[str, str]: + """Create standardized hook environment variables. + + Args: + lease_scope: LeaseScope containing lease metadata and socket path + + Returns: + Dictionary of environment variables for hook execution + """ + hook_env = os.environ.copy() + hook_env.update( + { + JUMPSTARTER_HOST: str(lease_scope.socket_path), + JMP_DRIVERS_ALLOW: "UNSAFE", # Allow all drivers for local access + "LEASE_NAME": lease_scope.lease_name, + "CLIENT_NAME": lease_scope.client_name, + } + ) + return hook_env + + async def _execute_hook( + self, + hook_config: HookInstanceConfigV1Alpha1, + lease_scope: "LeaseContext", + log_source: LogSource, + ) -> None: + """Execute a single hook command. 
+ + Args: + hook_config: Hook configuration including script, timeout, and on_failure + lease_scope: LeaseScope containing lease metadata and session + log_source: Log source for hook output + """ + command = hook_config.script + if not command or not command.strip(): + logger.debug("Hook command is empty, skipping") + return + + logger.info("Executing hook: %s", command.strip().split("\n")[0][:100]) + + # Determine hook type from log source + hook_type = "before_lease" if log_source == LogSource.BEFORE_LEASE_HOOK else "after_lease" + + # Use existing session from lease_scope + hook_env = self._create_hook_env(lease_scope) + + return await self._execute_hook_process( + hook_config, lease_scope, log_source, hook_env, lease_scope.session, hook_type + ) + + def _handle_hook_failure( + self, + error_msg: str, + on_failure: Literal["warn", "endLease", "exit"], + hook_type: Literal["before_lease", "after_lease"], + cause: Exception | None = None, + ) -> None: + """Handle hook failure according to on_failure setting. 
+ + Args: + error_msg: Error message describing the failure + on_failure: The on_failure mode ('warn', 'endLease', or 'exit') + hook_type: The type of hook that failed + cause: Optional exception that caused the failure + + Raises: + HookExecutionError: If on_failure is 'endLease' or 'exit' + """ + if on_failure == "warn": + logger.warning("%s (on_failure=warn, continuing)", error_msg) + return + + logger.error("%s (on_failure=%s, raising exception)", error_msg, on_failure) + + error = HookExecutionError( + message=error_msg, + on_failure=on_failure, + hook_type=hook_type, + ) + + # Properly handle exception chaining + if cause is not None: + raise error from cause + else: + raise error + + async def _execute_hook_process( + self, + hook_config: HookInstanceConfigV1Alpha1, + lease_scope: "LeaseContext", + log_source: LogSource, + hook_env: dict[str, str], + logging_session: Session, + hook_type: Literal["before_lease", "after_lease"], + ) -> None: + """Execute the hook process with the given environment and logging session. + + Uses anyio for subprocess execution to be compatible with the anyio-based exporter. 
+ """ + + + command = hook_config.script + timeout = hook_config.timeout + on_failure = hook_config.on_failure + + # Exception handling + error_msg: str | None = None + cause: Exception | None = None + timed_out = False + + try: + # Execute the hook command using shell via anyio + # Pass the command as a string to use shell mode + async with await open_process( + command, + env=hook_env, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + ) as process: + output_lines: list[str] = [] + + async def read_output() -> None: + """Read stdout line by line.""" + assert process.stdout is not None + buffer = b"" + async for chunk in process.stdout: + buffer += chunk + while b"\n" in buffer: + line, buffer = buffer.split(b"\n", 1) + line_decoded = line.decode().rstrip() + output_lines.append(line_decoded) + logger.info("[hook output] %s", line_decoded) + # Handle any remaining data without newline + if buffer: + line_decoded = buffer.decode().rstrip() + if line_decoded: + output_lines.append(line_decoded) + logger.info("[hook output] %s", line_decoded) + + # Use move_on_after for timeout + with anyio.move_on_after(timeout) as cancel_scope: + await read_output() + await process.wait() + + if cancel_scope.cancelled_caught: + timed_out = True + error_msg = f"Hook timed out after {timeout} seconds" + logger.error(error_msg) + # Terminate the process + process.terminate() + # Give it a moment to terminate gracefully + with anyio.move_on_after(5): + await process.wait() + # Force kill if still running + if process.returncode is None: + process.kill() + + elif process.returncode == 0: + logger.info("Hook executed successfully") + return + else: + error_msg = f"Hook failed with exit code {process.returncode}" + + except Exception as e: + error_msg = f"Error executing hook: {e}" + cause = e + logger.error(error_msg, exc_info=True) + + # Handle failure if one occurred + if error_msg is not None: + # For timeout, create a TimeoutError as the cause + if timed_out and cause is None: + 
cause = TimeoutError(error_msg)
+            self._handle_hook_failure(error_msg, on_failure, hook_type, cause)
+
+    async def execute_before_lease_hook(self, lease_scope: "LeaseContext") -> None:
+        """Execute the before-lease hook.
+
+        Args:
+            lease_scope: LeaseContext with lease metadata and session
+
+        Raises:
+            HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit'
+        """
+        if not self.config.before_lease:
+            logger.debug("No before-lease hook configured")
+            return
+
+        logger.info("Executing before-lease hook for lease %s", lease_scope.lease_name)
+        await self._execute_hook(
+            self.config.before_lease,
+            lease_scope,
+            LogSource.BEFORE_LEASE_HOOK,
+        )
+
+    async def execute_after_lease_hook(self, lease_scope: "LeaseContext") -> None:
+        """Execute the after-lease hook.
+
+        Args:
+            lease_scope: LeaseContext with lease metadata and session
+
+        Raises:
+            HookExecutionError: If hook fails and on_failure is set to 'endLease' or 'exit'
+        """
+        if not self.config.after_lease:
+            logger.debug("No after-lease hook configured")
+            return
+
+        logger.info("Executing after-lease hook for lease %s", lease_scope.lease_name)
+        await self._execute_hook(
+            self.config.after_lease,
+            lease_scope,
+            LogSource.AFTER_LEASE_HOOK,
+        )
+
+    async def run_before_lease_hook(
+        self,
+        lease_scope: "LeaseContext",
+        report_status: Callable[["ExporterStatus", str], Awaitable[None]],
+        shutdown: Callable[..., None],
+    ) -> None:
+        """Execute before-lease hook with full orchestration.
+ + This method handles the complete lifecycle of running a before-lease hook: + - Waits for the lease scope to be ready (session/socket populated) + - Reports status changes via the provided callback + - Sets up the hook executor with the session for logging + - Executes the hook and handles errors + - Always signals the before_lease_hook event to unblock connections + + Args: + lease_scope: LeaseScope containing session, socket_path, and sync event + report_status: Async callback to report status changes to controller + shutdown: Callback to trigger exporter shutdown (accepts optional exit_code kwarg) + """ + try: + # Wait for lease scope to be fully populated by handle_lease + # This is necessary because handle_lease and run_before_lease_hook run concurrently + timeout = 30 # seconds + interval = 0.1 # seconds + elapsed = 0.0 + while not lease_scope.is_ready(): + if elapsed >= timeout: + error_msg = "Timeout waiting for lease scope to be ready" + logger.error(error_msg) + await report_status(ExporterStatus.BEFORE_LEASE_HOOK_FAILED, error_msg) + lease_scope.before_lease_hook.set() + return + await anyio.sleep(interval) + elapsed += interval + + # Check if hook is configured + if not self.config.before_lease: + logger.debug("No before-lease hook configured") + await report_status(ExporterStatus.LEASE_READY, "Ready for commands") + return + + await report_status(ExporterStatus.BEFORE_LEASE_HOOK, "Running beforeLease hook") + + # Execute hook with lease scope + logger.info("Executing before-lease hook for lease %s", lease_scope.lease_name) + await self._execute_hook( + self.config.before_lease, + lease_scope, + LogSource.BEFORE_LEASE_HOOK, + ) + + await report_status(ExporterStatus.LEASE_READY, "Ready for commands") + logger.info("beforeLease hook completed successfully") + + except HookExecutionError as e: + if e.should_shutdown_exporter(): + # on_failure='exit' - shut down the entire exporter + logger.error("beforeLease hook failed with on_failure='exit': %s", e) 
+ await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed (on_failure=exit, shutting down): {e}", + ) + logger.error("Shutting down exporter due to beforeLease hook failure with on_failure='exit'") + # Exit code 1 tells the CLI not to restart the exporter + shutdown(exit_code=1) + else: + # on_failure='endLease' - just block this lease, exporter stays available + logger.error("beforeLease hook failed with on_failure='endLease': %s", e) + await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed (on_failure=endLease): {e}", + ) + # TODO: We need to implement a controller-side mechanism to end the lease here + + except Exception as e: + logger.error("beforeLease hook failed with unexpected error: %s", e, exc_info=True) + await report_status( + ExporterStatus.BEFORE_LEASE_HOOK_FAILED, + f"beforeLease hook failed: {e}", + ) + # Unexpected errors don't trigger shutdown - just block the lease + + finally: + # Always set the event to unblock connections + lease_scope.before_lease_hook.set() + + async def run_after_lease_hook( + self, + lease_scope: "LeaseContext", + report_status: Callable[["ExporterStatus", str], Awaitable[None]], + shutdown: Callable[..., None], + ) -> None: + """Execute after-lease hook with full orchestration. 
+ + This method handles the complete lifecycle of running an after-lease hook: + - Validates that the lease scope is ready + - Reports status changes via the provided callback + - Sets up the hook executor with the session for logging + - Executes the hook and handles errors + - Triggers shutdown on critical failures (HookExecutionError) + + Args: + lease_scope: LeaseScope containing session, socket_path, and client info + report_status: Async callback to report status changes to controller + shutdown: Callback to trigger exporter shutdown (accepts optional exit_code kwarg) + """ + try: + # Verify lease scope is ready - for after-lease this should always be true + # since we've already processed the lease, but check defensively + if not lease_scope.is_ready(): + logger.warning("LeaseScope not ready for after-lease hook, skipping") + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + return + + # Check if hook is configured + if not self.config.after_lease: + logger.debug("No after-lease hook configured") + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + return + + await report_status(ExporterStatus.AFTER_LEASE_HOOK, "Running afterLease hooks") + + # Execute hook with lease scope + logger.info("Executing after-lease hook for lease %s", lease_scope.lease_name) + await self._execute_hook( + self.config.after_lease, + lease_scope, + LogSource.AFTER_LEASE_HOOK, + ) + + await report_status(ExporterStatus.AVAILABLE, "Available for new lease") + logger.info("afterLease hook completed successfully") + + except HookExecutionError as e: + if e.should_shutdown_exporter(): + # on_failure='exit' - shut down the entire exporter + logger.error("afterLease hook failed with on_failure='exit': %s", e) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed (on_failure=exit, shutting down): {e}", + ) + logger.error("Shutting down exporter due to afterLease hook failure with on_failure='exit'") + # 
Exit code 1 tells the CLI not to restart the exporter + shutdown(exit_code=1) + else: + # on_failure='endLease' - lease already ended, just report the failure + # The exporter remains available for new leases + logger.error("afterLease hook failed with on_failure='endLease': %s", e) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed (on_failure=endLease): {e}", + ) + # Note: Lease has already ended - no shutdown needed, exporter remains available + + except Exception as e: + logger.error("afterLease hook failed with unexpected error: %s", e, exc_info=True) + await report_status( + ExporterStatus.AFTER_LEASE_HOOK_FAILED, + f"afterLease hook failed: {e}", + ) + # Unexpected errors don't trigger shutdown - exporter remains available diff --git a/packages/jumpstarter/jumpstarter/exporter/hooks_test.py b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py new file mode 100644 index 000000000..d39a6ecca --- /dev/null +++ b/packages/jumpstarter/jumpstarter/exporter/hooks_test.py @@ -0,0 +1,303 @@ +import asyncio +from typing import Callable +from unittest.mock import AsyncMock, Mock, call, patch + +import pytest + +from jumpstarter.config.env import JMP_DRIVERS_ALLOW, JUMPSTARTER_HOST +from jumpstarter.config.exporter import HookConfigV1Alpha1, HookInstanceConfigV1Alpha1 +from jumpstarter.driver import Driver +from jumpstarter.exporter.hooks import HookExecutionError, HookExecutor + +pytestmark = pytest.mark.anyio + + +class MockDriver(Driver): + @classmethod + def client(cls) -> str: + return "test.MockClient" + + def close(self) -> None: + pass + + def reset(self) -> None: + pass + + +@pytest.fixture +def mock_device_factory() -> Callable[[], MockDriver]: + def factory() -> MockDriver: + return MockDriver() + + return factory + + +@pytest.fixture +def hook_config() -> HookConfigV1Alpha1: + return HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), + 
after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease hook executed'", timeout=10), + ) + + +@pytest.fixture +def lease_scope(): + from anyio import Event + + from jumpstarter.exporter.lease_context import LeaseContext + + lease_scope = LeaseContext( + lease_name="test-lease-123", + before_lease_hook=Event(), + client_name="test-client", + ) + # Add mock session to lease_scope + mock_session = Mock() + lease_scope.session = mock_session + lease_scope.socket_path = "/tmp/test_socket" + return lease_scope + + +class TestHookExecutor: + async def test_hook_executor_creation(self, hook_config, mock_device_factory) -> None: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + assert executor.config == hook_config + assert executor.device_factory == mock_device_factory + + async def test_empty_hook_execution(self, mock_device_factory, lease_scope) -> None: + empty_config = HookConfigV1Alpha1() + executor = HookExecutor( + config=empty_config, + device_factory=mock_device_factory, + ) + + # Both hooks should return None for empty/None commands + assert await executor.execute_before_lease_hook(lease_scope) is None + assert await executor.execute_after_lease_hook(lease_scope) is None + + async def test_successful_hook_execution(self, mock_device_factory, lease_scope) -> None: + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), + ) + # Mock asyncio.create_subprocess_shell to simulate successful execution + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline to simulate line-by-line output + mock_process.stdout.readline.side_effect = [ + b"Pre-lease hook executed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + 
) + + result = await executor.execute_before_lease_hook(lease_scope) + + assert result is None + + # Verify subprocess was called with correct environment + mock_subprocess.assert_called_once() + call_args = mock_subprocess.call_args + command = call_args[0][0] + env = call_args[1]["env"] + + assert command == "echo 'Pre-lease hook executed'" + assert JUMPSTARTER_HOST in env + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + + async def test_failed_hook_execution(self, mock_device_factory, lease_scope) -> None: + failed_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1( + script="exit 1", timeout=10, on_failure="endLease" + ), # Command that will fail with on_failure="endLease" + ) + + # Mock failed process + mock_process = AsyncMock() + mock_process.returncode = 1 + # Mock stdout.readline for failed process + mock_process.stdout.readline.side_effect = [ + b"Command failed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process): + executor = HookExecutor( + config=failed_config, + device_factory=mock_device_factory, + ) + + # Should raise HookExecutionError since on_failure="endLease" + with pytest.raises(HookExecutionError, match="Hook failed with exit code 1"): + await executor.execute_before_lease_hook(lease_scope) + + async def test_hook_timeout(self, mock_device_factory, lease_scope) -> None: + timeout_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1( + script="sleep 60", timeout=1, on_failure="exit" + ), # Command that will timeout with on_failure="exit" + ) + + # Mock process that times out + mock_process = AsyncMock() + mock_process.stdout.readline.return_value = b"" # EOF + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) + + with 
( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + ): + executor = HookExecutor( + config=timeout_config, + device_factory=mock_device_factory, + ) + + # Should raise HookExecutionError since on_failure="exit" + with pytest.raises(HookExecutionError, match="timed out after 1 seconds"): + await executor.execute_before_lease_hook(lease_scope) + + mock_process.terminate.assert_called_once() + + async def test_hook_environment_variables(self, mock_device_factory, lease_scope) -> None: + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Pre-lease hook executed'", timeout=10), + ) + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock stdout.readline for environment test + mock_process.stdout.readline.side_effect = [ + b"", # EOF (no output) + ] + mock_process.wait = AsyncMock(return_value=None) + + with patch("asyncio.create_subprocess_shell", return_value=mock_process) as mock_subprocess: + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + await executor.execute_before_lease_hook(lease_scope) + + # Check that expected environment variables are set (unused fields removed) + call_args = mock_subprocess.call_args + env = call_args[1]["env"] + + assert env["LEASE_NAME"] == "test-lease-123" + assert env["CLIENT_NAME"] == "test-client" + # These fields are no longer set: + assert "LEASE_DURATION" not in env + assert "EXPORTER_NAME" not in env + assert "EXPORTER_NAMESPACE" not in env + assert env[JUMPSTARTER_HOST] == "/tmp/test_socket" + assert env[JMP_DRIVERS_ALLOW] == "UNSAFE" + + async def test_real_time_output_logging(self, mock_device_factory, lease_scope) -> None: + """Test that hook output is logged in real-time at INFO level.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="echo 'Line 1'; echo 'Line 2'; echo 'Line 3'", timeout=10), + ) + + 
mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock multiple lines of output to verify streaming + mock_process.stdout.readline.side_effect = [ + b"Line 1\n", + b"Line 2\n", + b"Line 3\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_before_lease_hook(lease_scope) + + assert result is None + + # Verify that output lines were logged in real-time at INFO level + expected_calls = [ + call("Executing before-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Line 1'; echo 'Line 2'; echo 'Line 3'"), + call("Hook executed successfully"), + ] + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_post_lease_hook_execution_on_completion(self, mock_device_factory, lease_scope) -> None: + """Test that post-lease hook executes when called directly.""" + hook_config = HookConfigV1Alpha1( + after_lease=HookInstanceConfigV1Alpha1(script="echo 'Post-lease cleanup completed'", timeout=10), + ) + + mock_process = AsyncMock() + mock_process.returncode = 0 + # Mock post-lease hook output + mock_process.stdout.readline.side_effect = [ + b"Post-lease cleanup completed\n", + b"", # EOF + ] + mock_process.wait = AsyncMock(return_value=None) + + # Mock the logger to capture log calls + with ( + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + patch("asyncio.create_subprocess_shell", return_value=mock_process), + ): + executor = HookExecutor( + config=hook_config, + device_factory=mock_device_factory, + ) + + result = await executor.execute_after_lease_hook(lease_scope) + + assert result is None + + # Verify that post-lease hook output was logged + expected_calls = [ + 
call("Executing after-lease hook for lease %s", "test-lease-123"), + call("Executing hook: %s", "echo 'Post-lease cleanup completed'"), + call("Hook executed successfully"), + ] + mock_logger.info.assert_has_calls(expected_calls, any_order=False) + + async def test_hook_timeout_with_warn(self, mock_device_factory, lease_scope) -> None: + """Test that hook succeeds when timeout occurs but on_failure='warn'.""" + hook_config = HookConfigV1Alpha1( + before_lease=HookInstanceConfigV1Alpha1(script="sleep 60", timeout=1, on_failure="warn"), + ) + + mock_process = AsyncMock() + mock_process.stdout.readline.return_value = b"" # EOF + mock_process.terminate = AsyncMock(return_value=None) + mock_process.wait = AsyncMock(return_value=None) + + with ( + patch("asyncio.create_subprocess_shell", return_value=mock_process), + patch("asyncio.wait_for", side_effect=asyncio.TimeoutError()), + patch("jumpstarter.exporter.hooks.logger") as mock_logger, + ): + executor = HookExecutor(config=hook_config, device_factory=mock_device_factory) + result = await executor.execute_before_lease_hook(lease_scope) + assert result is None + # Verify WARNING log was created + assert any("on_failure=warn, continuing" in str(call) for call in mock_logger.warning.call_args_list) diff --git a/packages/jumpstarter/jumpstarter/exporter/lease_context.py b/packages/jumpstarter/jumpstarter/exporter/lease_context.py new file mode 100644 index 000000000..9e8878392 --- /dev/null +++ b/packages/jumpstarter/jumpstarter/exporter/lease_context.py @@ -0,0 +1,84 @@ +"""LeaseScope: Context manager for lease-related resources. + +This module provides a clean abstraction for managing the lifecycle of resources +associated with a lease, including the session, socket path, and synchronization events. 
+""" + +from dataclasses import dataclass, field +from typing import TYPE_CHECKING + +from anyio import Event + +from jumpstarter.common import ExporterStatus + +if TYPE_CHECKING: + from jumpstarter.exporter.session import Session + + +@dataclass +class LeaseContext: + """Encapsulates all resources associated with an active lease. + + This class bundles together the session, socket path, synchronization event, + and lease identity information that are needed throughout the lease lifecycle. + By grouping these resources, we make their relationships and lifecycles explicit. + + Attributes: + lease_name: Name of the current lease assigned by the controller + session: The Session object managing the device and gRPC services (set in handle_lease) + socket_path: Unix socket path where the session is serving (set in handle_lease) + before_lease_hook: Event that signals when before-lease hook completes + client_name: Name of the client currently holding the lease (empty if unleased) + current_status: Current exporter status (stored here for access before session is created) + status_message: Message describing the current status + """ + + lease_name: str + before_lease_hook: Event + session: "Session | None" = None + socket_path: str = "" + client_name: str = field(default="") + current_status: ExporterStatus = field(default=ExporterStatus.AVAILABLE) + status_message: str = field(default="") + + def __post_init__(self): + """Validate that required resources are present.""" + assert self.before_lease_hook is not None, "LeaseScope requires a before_lease_hook event" + assert self.lease_name, "LeaseScope requires a non-empty lease_name" + + def is_ready(self) -> bool: + """Check if the lease scope has been fully initialized with session and socket. + + Note: This checks for resource initialization (session/socket), not lease activity. + Use is_active() to check if the lease itself is active. 
+ """ + return self.session is not None and self.socket_path != "" + + def is_active(self) -> bool: + """Check if this lease is active (has a non-empty lease name).""" + return bool(self.lease_name) + + def has_client(self) -> bool: + """Check if a client is currently holding the lease.""" + return bool(self.client_name) + + def update_client(self, client_name: str): + """Update the client name for this lease.""" + self.client_name = client_name + + def clear_client(self): + """Clear the client name when the lease is no longer held.""" + self.client_name = "" + + def update_status(self, status: ExporterStatus, message: str = ""): + """Update the current status in the lease context. + + This stores the status in the LeaseContext so it's available even before + the session is created, fixing the race condition where GetStatus is called + before the session can be updated. + """ + self.current_status = status + self.status_message = message + # Also update session if it exists + if self.session: + self.session.update_status(status, message) diff --git a/packages/jumpstarter/jumpstarter/exporter/logging.py b/packages/jumpstarter/jumpstarter/exporter/logging.py index 629306c29..6a6e8dad9 100644 --- a/packages/jumpstarter/jumpstarter/exporter/logging.py +++ b/packages/jumpstarter/jumpstarter/exporter/logging.py @@ -1,23 +1,53 @@ import logging from collections import deque +from contextlib import contextmanager +from threading import RLock from jumpstarter_protocol import jumpstarter_pb2 +from .logging_protocol import LoggerRegistration +from jumpstarter.common import LogSource + class LogHandler(logging.Handler): - def __init__(self, queue: deque): + def __init__(self, queue: deque, source: LogSource = LogSource.UNSPECIFIED): logging.Handler.__init__(self) self.queue = queue self.listener = None + self.source = source # LogSource enum value + self._lock = RLock() + self._child_handlers = {} # Dict of logger_name -> LogSource mappings + + def add_child_handler(self, 
logger_name: str, source: LogSource): + """Add a child handler that will route logs from a specific logger with a different source.""" + with self._lock: + self._child_handlers[logger_name] = source + + def remove_child_handler(self, logger_name: str): + """Remove a child handler mapping.""" + with self._lock: + self._child_handlers.pop(logger_name, None) + + def get_source_for_record(self, record): + """Determine the appropriate log source for a record.""" + with self._lock: + # Check if this record comes from a logger with a specific source mapping + logger_name = record.name + for mapped_logger, source in self._child_handlers.items(): + if logger_name.startswith(mapped_logger): + return source + return self.source def enqueue(self, record): self.queue.append(record) def prepare(self, record): + source = self.get_source_for_record(record) return jumpstarter_pb2.LogStreamResponse( uuid="", severity=record.levelname, message=self.format(record), + source=source.value, # Convert to proto value ) def emit(self, record): @@ -25,3 +55,35 @@ def emit(self, record): self.enqueue(self.prepare(record)) except Exception: self.handleError(record) + + @contextmanager + def context_log_source(self, logger_name: str, source: LogSource): + """Context manager to temporarily set a log source for a specific logger.""" + self.add_child_handler(logger_name, source) + try: + yield + finally: + self.remove_child_handler(logger_name) + + +def get_logger( + name: str, source: LogSource = LogSource.SYSTEM, session: LoggerRegistration | None = None +) -> logging.Logger: + """ + Get a logger with automatic LogSource mapping. 
+ + Args: + name: Logger name (e.g., __name__ or custom name) + source: The LogSource to associate with this logger + session: Optional session to register with immediately + + Returns: + A standard Python logger instance + """ + logger = logging.getLogger(name) + + # If session provided, register the source mapping + if session: + session.add_logger_source(name, source) + + return logger diff --git a/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py b/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py new file mode 100644 index 000000000..04ed885f2 --- /dev/null +++ b/packages/jumpstarter/jumpstarter/exporter/logging_protocol.py @@ -0,0 +1,22 @@ +"""Protocol for logger registration to avoid circular dependencies.""" + +from typing import Protocol + +from jumpstarter.common import LogSource + + +class LoggerRegistration(Protocol): + """Protocol for objects that can register logger sources. + + This protocol defines the interface for objects that can associate + logger names with log sources, enabling proper routing of log messages. + """ + + def add_logger_source(self, logger_name: str, source: LogSource) -> None: + """Register a logger name with its corresponding log source. + + Args: + logger_name: Name of the logger to register + source: The log source category for this logger + """ + ... 
diff --git a/packages/jumpstarter/jumpstarter/exporter/session.py b/packages/jumpstarter/jumpstarter/exporter/session.py index 63ae2f08d..663c39e03 100644 --- a/packages/jumpstarter/jumpstarter/exporter/session.py +++ b/packages/jumpstarter/jumpstarter/exporter/session.py @@ -4,7 +4,7 @@ from contextlib import asynccontextmanager, contextmanager, suppress from dataclasses import dataclass, field from logging.handlers import QueueHandler -from typing import Self +from typing import TYPE_CHECKING, Self from uuid import UUID import grpc @@ -17,13 +17,15 @@ ) from .logging import LogHandler -from jumpstarter.common import Metadata, TemporarySocket +from jumpstarter.common import ExporterStatus, LogSource, Metadata, TemporarySocket from jumpstarter.common.streams import StreamRequestMetadata -from jumpstarter.driver import Driver from jumpstarter.streams.common import forward_stream from jumpstarter.streams.metadata import MetadataStreamAttributes from jumpstarter.streams.router import RouterStream +if TYPE_CHECKING: + from jumpstarter.driver import Driver + logger = logging.getLogger(__name__) @@ -34,11 +36,14 @@ class Session( Metadata, ContextManagerMixin, ): - root_device: Driver - mapping: dict[UUID, Driver] + root_device: "Driver" + mapping: dict[UUID, "Driver"] _logging_queue: deque = field(init=False) _logging_handler: QueueHandler = field(init=False) + _current_status: ExporterStatus = field(init=False, default=ExporterStatus.AVAILABLE) + _status_message: str = field(init=False, default="") + _status_update_event: Event = field(init=False) @contextmanager def __contextmanager__(self) -> Generator[Self]: @@ -67,7 +72,11 @@ def __init__(self, *args, root_device, **kwargs): self.mapping = {u: i for (u, _, _, i) in self.root_device.enumerate()} self._logging_queue = deque(maxlen=32) - self._logging_handler = LogHandler(self._logging_queue) + self._logging_handler = LogHandler(self._logging_queue, LogSource.SYSTEM) + self._status_update_event = Event() + + # Map all 
driver logs to DRIVER source + self._logging_handler.add_child_handler("driver.", LogSource.DRIVER) @asynccontextmanager async def serve_port_async(self, port): @@ -139,3 +148,31 @@ async def LogStream(self, request, context): yield self._logging_queue.popleft() except IndexError: await sleep(0.5) + + def update_status(self, status: int | ExporterStatus, message: str = ""): + """Update the current exporter status for the session.""" + if isinstance(status, int): + self._current_status = ExporterStatus.from_proto(status) + else: + self._current_status = status + self._status_message = message + + def add_logger_source(self, logger_name: str, source: LogSource): + """Add a log source mapping for a specific logger.""" + self._logging_handler.add_child_handler(logger_name, source) + + def remove_logger_source(self, logger_name: str): + """Remove a log source mapping for a specific logger.""" + self._logging_handler.remove_child_handler(logger_name) + + def context_log_source(self, logger_name: str, source: LogSource): + """Context manager to temporarily set a log source for a specific logger.""" + return self._logging_handler.context_log_source(logger_name, source) + + async def GetStatus(self, request, context): + """Get the current exporter status.""" + logger.debug("GetStatus() -> %s", self._current_status) + return jumpstarter_pb2.GetStatusResponse( + status=self._current_status.to_proto(), + message=self._status_message, + )