diff --git a/python/pyspark/sql/connect/proto/pipelines_pb2.py b/python/pyspark/sql/connect/proto/pipelines_pb2.py index 04ff64796961e..750e456002578 100644 --- a/python/pyspark/sql/connect/proto/pipelines_pb2.py +++ b/python/pyspark/sql/connect/proto/pipelines_pb2.py @@ -39,7 +39,7 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x1dspark/connect/pipelines.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"\x8c\x11\n\x0fPipelineCommand\x12h\n\x15\x63reate_dataflow_graph\x18\x01 \x01(\x0b\x32\x32.spark.connect.PipelineCommand.CreateDataflowGraphH\x00R\x13\x63reateDataflowGraph\x12U\n\x0e\x64\x65\x66ine_dataset\x18\x02 \x01(\x0b\x32,.spark.connect.PipelineCommand.DefineDatasetH\x00R\rdefineDataset\x12L\n\x0b\x64\x65\x66ine_flow\x18\x03 \x01(\x0b\x32).spark.connect.PipelineCommand.DefineFlowH\x00R\ndefineFlow\x12\x62\n\x13\x64rop_dataflow_graph\x18\x04 \x01(\x0b\x32\x30.spark.connect.PipelineCommand.DropDataflowGraphH\x00R\x11\x64ropDataflowGraph\x12\x46\n\tstart_run\x18\x05 \x01(\x0b\x32\'.spark.connect.PipelineCommand.StartRunH\x00R\x08startRun\x12\x62\n\x19\x64\x65\x66ine_sql_graph_elements\x18\x06 \x01(\x0b\x32%.spark.connect.DefineSqlGraphElementsH\x00R\x16\x64\x65\x66ineSqlGraphElements\x1a\x87\x03\n\x13\x43reateDataflowGraph\x12,\n\x0f\x64\x65\x66\x61ult_catalog\x18\x01 \x01(\tH\x00R\x0e\x64\x65\x66\x61ultCatalog\x88\x01\x01\x12.\n\x10\x64\x65\x66\x61ult_database\x18\x02 \x01(\tH\x01R\x0f\x64\x65\x66\x61ultDatabase\x88\x01\x01\x12Z\n\x08sql_conf\x18\x05 \x03(\x0b\x32?.spark.connect.PipelineCommand.CreateDataflowGraph.SqlConfEntryR\x07sqlConf\x1a:\n\x0cSqlConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1aQ\n\x08Response\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x42\x14\n\x12_dataflow_graph_idB\x12\n\x10_default_catalogB\x13\n\x11_default_database\x1aZ\n\x11\x44ropDataflowGraph\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x42\x14\n\x12_dataflow_graph_id\x1a\xd1\x04\n\rDefineDataset\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x12&\n\x0c\x64\x61taset_name\x18\x02 \x01(\tH\x01R\x0b\x64\x61tasetName\x88\x01\x01\x12\x42\n\x0c\x64\x61taset_type\x18\x03 \x01(\x0e\x32\x1a.spark.connect.DatasetTypeH\x02R\x0b\x64\x61tasetType\x88\x01\x01\x12\x1d\n\x07\x63omment\x18\x04 \x01(\tH\x03R\x07\x63omment\x88\x01\x01\x12l\n\x10table_properties\x18\x05 \x03(\x0b\x32\x41.spark.connect.PipelineCommand.DefineDataset.TablePropertiesEntryR\x0ftableProperties\x12%\n\x0epartition_cols\x18\x06 \x03(\tR\rpartitionCols\x12\x34\n\x06schema\x18\x07 \x01(\x0b\x32\x17.spark.connect.DataTypeH\x04R\x06schema\x88\x01\x01\x12\x1b\n\x06\x66ormat\x18\x08 \x01(\tH\x05R\x06\x66ormat\x88\x01\x01\x1a\x42\n\x14TablePropertiesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x14\n\x12_dataflow_graph_idB\x0f\n\r_dataset_nameB\x0f\n\r_dataset_typeB\n\n\x08_commentB\t\n\x07_schemaB\t\n\x07_format\x1a\xbc\x03\n\nDefineFlow\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x12 \n\tflow_name\x18\x02 \x01(\tH\x01R\x08\x66lowName\x88\x01\x01\x12\x33\n\x13target_dataset_name\x18\x03 \x01(\tH\x02R\x11targetDatasetName\x88\x01\x01\x12\x30\n\x04plan\x18\x04 \x01(\x0b\x32\x17.spark.connect.RelationH\x03R\x04plan\x88\x01\x01\x12Q\n\x08sql_conf\x18\x05 
\x03(\x0b\x32\x36.spark.connect.PipelineCommand.DefineFlow.SqlConfEntryR\x07sqlConf\x12\x17\n\x04once\x18\x06 \x01(\x08H\x04R\x04once\x88\x01\x01\x1a:\n\x0cSqlConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x14\n\x12_dataflow_graph_idB\x0c\n\n_flow_nameB\x16\n\x14_target_dataset_nameB\x07\n\x05_planB\x07\n\x05_once\x1aQ\n\x08StartRun\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x42\x14\n\x12_dataflow_graph_idB\x0e\n\x0c\x63ommand_type"\xc7\x01\n\x16\x44\x65\x66ineSqlGraphElements\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x12\'\n\rsql_file_path\x18\x02 \x01(\tH\x01R\x0bsqlFilePath\x88\x01\x01\x12\x1e\n\x08sql_text\x18\x03 \x01(\tH\x02R\x07sqlText\x88\x01\x01\x42\x14\n\x12_dataflow_graph_idB\x10\n\x0e_sql_file_pathB\x0b\n\t_sql_text"\x8e\x02\n\x15PipelineCommandResult\x12\x81\x01\n\x1c\x63reate_dataflow_graph_result\x18\x01 \x01(\x0b\x32>.spark.connect.PipelineCommandResult.CreateDataflowGraphResultH\x00R\x19\x63reateDataflowGraphResult\x1a\x62\n\x19\x43reateDataflowGraphResult\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x42\x14\n\x12_dataflow_graph_idB\r\n\x0bresult_type"I\n\x13PipelineEventResult\x12\x32\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x1c.spark.connect.PipelineEventR\x05\x65vent"k\n\rPipelineEvent\x12!\n\ttimestamp\x18\x01 \x01(\tH\x00R\ttimestamp\x88\x01\x01\x12\x1d\n\x07message\x18\x02 \x01(\tH\x01R\x07message\x88\x01\x01\x42\x0c\n\n_timestampB\n\n\x08_message*a\n\x0b\x44\x61tasetType\x12\x1c\n\x18\x44\x41TASET_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11MATERIALIZED_VIEW\x10\x01\x12\t\n\x05TABLE\x10\x02\x12\x12\n\x0eTEMPORARY_VIEW\x10\x03\x42"\n\x1eorg.apache.spark.connect.protoP\x01\x62\x06proto3' + b'\n\x1dspark/connect/pipelines.proto\x12\rspark.connect\x1a\x1dspark/connect/relations.proto\x1a\x19spark/connect/types.proto"\xe6\x12\n\x0fPipelineCommand\x12h\n\x15\x63reate_dataflow_graph\x18\x01 \x01(\x0b\x32\x32.spark.connect.PipelineCommand.CreateDataflowGraphH\x00R\x13\x63reateDataflowGraph\x12U\n\x0e\x64\x65\x66ine_dataset\x18\x02 \x01(\x0b\x32,.spark.connect.PipelineCommand.DefineDatasetH\x00R\rdefineDataset\x12L\n\x0b\x64\x65\x66ine_flow\x18\x03 \x01(\x0b\x32).spark.connect.PipelineCommand.DefineFlowH\x00R\ndefineFlow\x12\x62\n\x13\x64rop_dataflow_graph\x18\x04 \x01(\x0b\x32\x30.spark.connect.PipelineCommand.DropDataflowGraphH\x00R\x11\x64ropDataflowGraph\x12\x46\n\tstart_run\x18\x05 \x01(\x0b\x32\'.spark.connect.PipelineCommand.StartRunH\x00R\x08startRun\x12r\n\x19\x64\x65\x66ine_sql_graph_elements\x18\x06 \x01(\x0b\x32\x35.spark.connect.PipelineCommand.DefineSqlGraphElementsH\x00R\x16\x64\x65\x66ineSqlGraphElements\x1a\x87\x03\n\x13\x43reateDataflowGraph\x12,\n\x0f\x64\x65\x66\x61ult_catalog\x18\x01 \x01(\tH\x00R\x0e\x64\x65\x66\x61ultCatalog\x88\x01\x01\x12.\n\x10\x64\x65\x66\x61ult_database\x18\x02 \x01(\tH\x01R\x0f\x64\x65\x66\x61ultDatabase\x88\x01\x01\x12Z\n\x08sql_conf\x18\x05 \x03(\x0b\x32?.spark.connect.PipelineCommand.CreateDataflowGraph.SqlConfEntryR\x07sqlConf\x1a:\n\x0cSqlConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x1aQ\n\x08Response\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x42\x14\n\x12_dataflow_graph_idB\x12\n\x10_default_catalogB\x13\n\x11_default_database\x1aZ\n\x11\x44ropDataflowGraph\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 
\x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x42\x14\n\x12_dataflow_graph_id\x1a\xd1\x04\n\rDefineDataset\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x12&\n\x0c\x64\x61taset_name\x18\x02 \x01(\tH\x01R\x0b\x64\x61tasetName\x88\x01\x01\x12\x42\n\x0c\x64\x61taset_type\x18\x03 \x01(\x0e\x32\x1a.spark.connect.DatasetTypeH\x02R\x0b\x64\x61tasetType\x88\x01\x01\x12\x1d\n\x07\x63omment\x18\x04 \x01(\tH\x03R\x07\x63omment\x88\x01\x01\x12l\n\x10table_properties\x18\x05 \x03(\x0b\x32\x41.spark.connect.PipelineCommand.DefineDataset.TablePropertiesEntryR\x0ftableProperties\x12%\n\x0epartition_cols\x18\x06 \x03(\tR\rpartitionCols\x12\x34\n\x06schema\x18\x07 \x01(\x0b\x32\x17.spark.connect.DataTypeH\x04R\x06schema\x88\x01\x01\x12\x1b\n\x06\x66ormat\x18\x08 \x01(\tH\x05R\x06\x66ormat\x88\x01\x01\x1a\x42\n\x14TablePropertiesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x14\n\x12_dataflow_graph_idB\x0f\n\r_dataset_nameB\x0f\n\r_dataset_typeB\n\n\x08_commentB\t\n\x07_schemaB\t\n\x07_format\x1a\xbc\x03\n\nDefineFlow\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x12 \n\tflow_name\x18\x02 \x01(\tH\x01R\x08\x66lowName\x88\x01\x01\x12\x33\n\x13target_dataset_name\x18\x03 \x01(\tH\x02R\x11targetDatasetName\x88\x01\x01\x12\x30\n\x04plan\x18\x04 \x01(\x0b\x32\x17.spark.connect.RelationH\x03R\x04plan\x88\x01\x01\x12Q\n\x08sql_conf\x18\x05 \x03(\x0b\x32\x36.spark.connect.PipelineCommand.DefineFlow.SqlConfEntryR\x07sqlConf\x12\x17\n\x04once\x18\x06 \x01(\x08H\x04R\x04once\x88\x01\x01\x1a:\n\x0cSqlConfEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\x42\x14\n\x12_dataflow_graph_idB\x0c\n\n_flow_nameB\x16\n\x14_target_dataset_nameB\x07\n\x05_planB\x07\n\x05_once\x1a\xc7\x01\n\x16\x44\x65\x66ineSqlGraphElements\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x12\'\n\rsql_file_path\x18\x02 \x01(\tH\x01R\x0bsqlFilePath\x88\x01\x01\x12\x1e\n\x08sql_text\x18\x03 \x01(\tH\x02R\x07sqlText\x88\x01\x01\x42\x14\n\x12_dataflow_graph_idB\x10\n\x0e_sql_file_pathB\x0b\n\t_sql_text\x1aQ\n\x08StartRun\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x42\x14\n\x12_dataflow_graph_idB\x0e\n\x0c\x63ommand_type"\x8e\x02\n\x15PipelineCommandResult\x12\x81\x01\n\x1c\x63reate_dataflow_graph_result\x18\x01 \x01(\x0b\x32>.spark.connect.PipelineCommandResult.CreateDataflowGraphResultH\x00R\x19\x63reateDataflowGraphResult\x1a\x62\n\x19\x43reateDataflowGraphResult\x12/\n\x11\x64\x61taflow_graph_id\x18\x01 \x01(\tH\x00R\x0f\x64\x61taflowGraphId\x88\x01\x01\x42\x14\n\x12_dataflow_graph_idB\r\n\x0bresult_type"I\n\x13PipelineEventResult\x12\x32\n\x05\x65vent\x18\x01 \x01(\x0b\x32\x1c.spark.connect.PipelineEventR\x05\x65vent"k\n\rPipelineEvent\x12!\n\ttimestamp\x18\x01 \x01(\tH\x00R\ttimestamp\x88\x01\x01\x12\x1d\n\x07message\x18\x02 \x01(\tH\x01R\x07message\x88\x01\x01\x42\x0c\n\n_timestampB\n\n\x08_message*a\n\x0b\x44\x61tasetType\x12\x1c\n\x18\x44\x41TASET_TYPE_UNSPECIFIED\x10\x00\x12\x15\n\x11MATERIALIZED_VIEW\x10\x01\x12\t\n\x05TABLE\x10\x02\x12\x12\n\x0eTEMPORARY_VIEW\x10\x03\x42"\n\x1eorg.apache.spark.connect.protoP\x01\x62\x06proto3' ) _globals = globals() @@ -56,36 +56,36 @@ _globals["_PIPELINECOMMAND_DEFINEDATASET_TABLEPROPERTIESENTRY"]._serialized_options = b"8\001" _globals["_PIPELINECOMMAND_DEFINEFLOW_SQLCONFENTRY"]._loaded_options = 
None _globals["_PIPELINECOMMAND_DEFINEFLOW_SQLCONFENTRY"]._serialized_options = b"8\001" - _globals["_DATASETTYPE"]._serialized_start = 2956 - _globals["_DATASETTYPE"]._serialized_end = 3053 + _globals["_DATASETTYPE"]._serialized_start = 2972 + _globals["_DATASETTYPE"]._serialized_end = 3069 _globals["_PIPELINECOMMAND"]._serialized_start = 107 - _globals["_PIPELINECOMMAND"]._serialized_end = 2295 - _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH"]._serialized_start = 670 - _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH"]._serialized_end = 1061 - _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_SQLCONFENTRY"]._serialized_start = 879 - _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_SQLCONFENTRY"]._serialized_end = 937 - _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_RESPONSE"]._serialized_start = 939 - _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_RESPONSE"]._serialized_end = 1020 - _globals["_PIPELINECOMMAND_DROPDATAFLOWGRAPH"]._serialized_start = 1063 - _globals["_PIPELINECOMMAND_DROPDATAFLOWGRAPH"]._serialized_end = 1153 - _globals["_PIPELINECOMMAND_DEFINEDATASET"]._serialized_start = 1156 - _globals["_PIPELINECOMMAND_DEFINEDATASET"]._serialized_end = 1749 - _globals["_PIPELINECOMMAND_DEFINEDATASET_TABLEPROPERTIESENTRY"]._serialized_start = 1593 - _globals["_PIPELINECOMMAND_DEFINEDATASET_TABLEPROPERTIESENTRY"]._serialized_end = 1659 - _globals["_PIPELINECOMMAND_DEFINEFLOW"]._serialized_start = 1752 - _globals["_PIPELINECOMMAND_DEFINEFLOW"]._serialized_end = 2196 - _globals["_PIPELINECOMMAND_DEFINEFLOW_SQLCONFENTRY"]._serialized_start = 879 - _globals["_PIPELINECOMMAND_DEFINEFLOW_SQLCONFENTRY"]._serialized_end = 937 - _globals["_PIPELINECOMMAND_STARTRUN"]._serialized_start = 2198 - _globals["_PIPELINECOMMAND_STARTRUN"]._serialized_end = 2279 - _globals["_DEFINESQLGRAPHELEMENTS"]._serialized_start = 2298 - _globals["_DEFINESQLGRAPHELEMENTS"]._serialized_end = 2497 - _globals["_PIPELINECOMMANDRESULT"]._serialized_start = 2500 - _globals["_PIPELINECOMMANDRESULT"]._serialized_end = 2770 - _globals["_PIPELINECOMMANDRESULT_CREATEDATAFLOWGRAPHRESULT"]._serialized_start = 2657 - _globals["_PIPELINECOMMANDRESULT_CREATEDATAFLOWGRAPHRESULT"]._serialized_end = 2755 - _globals["_PIPELINEEVENTRESULT"]._serialized_start = 2772 - _globals["_PIPELINEEVENTRESULT"]._serialized_end = 2845 - _globals["_PIPELINEEVENT"]._serialized_start = 2847 - _globals["_PIPELINEEVENT"]._serialized_end = 2954 + _globals["_PIPELINECOMMAND"]._serialized_end = 2513 + _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH"]._serialized_start = 686 + _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH"]._serialized_end = 1077 + _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_SQLCONFENTRY"]._serialized_start = 895 + _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_SQLCONFENTRY"]._serialized_end = 953 + _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_RESPONSE"]._serialized_start = 955 + _globals["_PIPELINECOMMAND_CREATEDATAFLOWGRAPH_RESPONSE"]._serialized_end = 1036 + _globals["_PIPELINECOMMAND_DROPDATAFLOWGRAPH"]._serialized_start = 1079 + _globals["_PIPELINECOMMAND_DROPDATAFLOWGRAPH"]._serialized_end = 1169 + _globals["_PIPELINECOMMAND_DEFINEDATASET"]._serialized_start = 1172 + _globals["_PIPELINECOMMAND_DEFINEDATASET"]._serialized_end = 1765 + _globals["_PIPELINECOMMAND_DEFINEDATASET_TABLEPROPERTIESENTRY"]._serialized_start = 1609 + _globals["_PIPELINECOMMAND_DEFINEDATASET_TABLEPROPERTIESENTRY"]._serialized_end = 1675 + _globals["_PIPELINECOMMAND_DEFINEFLOW"]._serialized_start = 1768 + _globals["_PIPELINECOMMAND_DEFINEFLOW"]._serialized_end = 2212 
+ _globals["_PIPELINECOMMAND_DEFINEFLOW_SQLCONFENTRY"]._serialized_start = 895 + _globals["_PIPELINECOMMAND_DEFINEFLOW_SQLCONFENTRY"]._serialized_end = 953 + _globals["_PIPELINECOMMAND_DEFINESQLGRAPHELEMENTS"]._serialized_start = 2215 + _globals["_PIPELINECOMMAND_DEFINESQLGRAPHELEMENTS"]._serialized_end = 2414 + _globals["_PIPELINECOMMAND_STARTRUN"]._serialized_start = 2416 + _globals["_PIPELINECOMMAND_STARTRUN"]._serialized_end = 2497 + _globals["_PIPELINECOMMANDRESULT"]._serialized_start = 2516 + _globals["_PIPELINECOMMANDRESULT"]._serialized_end = 2786 + _globals["_PIPELINECOMMANDRESULT_CREATEDATAFLOWGRAPHRESULT"]._serialized_start = 2673 + _globals["_PIPELINECOMMANDRESULT_CREATEDATAFLOWGRAPHRESULT"]._serialized_end = 2771 + _globals["_PIPELINEEVENTRESULT"]._serialized_start = 2788 + _globals["_PIPELINEEVENTRESULT"]._serialized_end = 2861 + _globals["_PIPELINEEVENT"]._serialized_start = 2863 + _globals["_PIPELINEEVENT"]._serialized_end = 2970 # @@protoc_insertion_point(module_scope) diff --git a/python/pyspark/sql/connect/proto/pipelines_pb2.pyi b/python/pyspark/sql/connect/proto/pipelines_pb2.pyi index 6402a6593e707..1eafaf44f3736 100644 --- a/python/pyspark/sql/connect/proto/pipelines_pb2.pyi +++ b/python/pyspark/sql/connect/proto/pipelines_pb2.pyi @@ -33,6 +33,7 @@ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ + import builtins import collections.abc import google.protobuf.descriptor @@ -58,7 +59,7 @@ class _DatasetType: class _DatasetTypeEnumTypeWrapper( google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DatasetType.ValueType], builtins.type, -): # noqa: F821 +): DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor DATASET_TYPE_UNSPECIFIED: _DatasetType.ValueType # 0 """Safe default value. Should not be used.""" @@ -82,16 +83,19 @@ TEMPORARY_VIEW: DatasetType.ValueType # 3 """A view which is not published to the catalog""" global___DatasetType = DatasetType +@typing.final class PipelineCommand(google.protobuf.message.Message): """Dispatch object for pipelines commands. See each individual command for documentation.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor + @typing.final class CreateDataflowGraph(google.protobuf.message.Message): """Request to create a new dataflow graph.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor + @typing.final class SqlConfEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -106,9 +110,10 @@ class PipelineCommand(google.protobuf.message.Message): value: builtins.str = ..., ) -> None: ... def ClearField( - self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] + self, field_name: typing.Literal["key", b"key", "value", b"value"] ) -> None: ... + @typing.final class Response(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -122,7 +127,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "dataflow_graph_id", @@ -131,7 +136,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> builtins.bool: ... 
def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "dataflow_graph_id", @@ -139,9 +144,8 @@ class PipelineCommand(google.protobuf.message.Message): ], ) -> None: ... def WhichOneof( - self, - oneof_group: typing_extensions.Literal["_dataflow_graph_id", b"_dataflow_graph_id"], - ) -> typing_extensions.Literal["dataflow_graph_id"] | None: ... + self, oneof_group: typing.Literal["_dataflow_graph_id", b"_dataflow_graph_id"] + ) -> typing.Literal["dataflow_graph_id"] | None: ... DEFAULT_CATALOG_FIELD_NUMBER: builtins.int DEFAULT_DATABASE_FIELD_NUMBER: builtins.int @@ -164,7 +168,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_default_catalog", b"_default_catalog", "_default_database", @@ -177,7 +181,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> builtins.bool: ... def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_default_catalog", b"_default_catalog", "_default_database", @@ -192,13 +196,14 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_default_catalog", b"_default_catalog"] - ) -> typing_extensions.Literal["default_catalog"] | None: ... + self, oneof_group: typing.Literal["_default_catalog", b"_default_catalog"] + ) -> typing.Literal["default_catalog"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_default_database", b"_default_database"] - ) -> typing_extensions.Literal["default_database"] | None: ... + self, oneof_group: typing.Literal["_default_database", b"_default_database"] + ) -> typing.Literal["default_database"] | None: ... + @typing.final class DropDataflowGraph(google.protobuf.message.Message): """Drops the graph and stops any running attached flows.""" @@ -214,7 +219,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "dataflow_graph_id", @@ -223,7 +228,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> builtins.bool: ... def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "dataflow_graph_id", @@ -231,15 +236,16 @@ class PipelineCommand(google.protobuf.message.Message): ], ) -> None: ... def WhichOneof( - self, - oneof_group: typing_extensions.Literal["_dataflow_graph_id", b"_dataflow_graph_id"], - ) -> typing_extensions.Literal["dataflow_graph_id"] | None: ... + self, oneof_group: typing.Literal["_dataflow_graph_id", b"_dataflow_graph_id"] + ) -> typing.Literal["dataflow_graph_id"] | None: ... + @typing.final class DefineDataset(google.protobuf.message.Message): """Request to define a dataset: a table, a materialized view, or a temporary view.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor + @typing.final class TablePropertiesEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -254,7 +260,7 @@ class PipelineCommand(google.protobuf.message.Message): value: builtins.str = ..., ) -> None: ... 
def ClearField( - self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] + self, field_name: typing.Literal["key", b"key", "value", b"value"] ) -> None: ... DATAFLOW_GRAPH_ID_FIELD_NUMBER: builtins.int @@ -273,6 +279,10 @@ class PipelineCommand(google.protobuf.message.Message): """The type of the dataset.""" comment: builtins.str """Optional comment for the dataset.""" + format: builtins.str + """The output table format of the dataset. Only applies to dataset_type == TABLE and + dataset_type == MATERIALIZED_VIEW. + """ @property def table_properties( self, @@ -288,10 +298,6 @@ class PipelineCommand(google.protobuf.message.Message): @property def schema(self) -> pyspark.sql.connect.proto.types_pb2.DataType: """Schema for the dataset. If unset, this will be inferred from incoming flows.""" - format: builtins.str - """The output table format of the dataset. Only applies to dataset_type == TABLE and - dataset_type == MATERIALIZED_VIEW. - """ def __init__( self, *, @@ -306,7 +312,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_comment", b"_comment", "_dataflow_graph_id", @@ -335,7 +341,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> builtins.bool: ... def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_comment", b"_comment", "_dataflow_graph_id", @@ -368,35 +374,36 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_comment", b"_comment"] - ) -> typing_extensions.Literal["comment"] | None: ... + self, oneof_group: typing.Literal["_comment", b"_comment"] + ) -> typing.Literal["comment"] | None: ... @typing.overload def WhichOneof( - self, - oneof_group: typing_extensions.Literal["_dataflow_graph_id", b"_dataflow_graph_id"], - ) -> typing_extensions.Literal["dataflow_graph_id"] | None: ... + self, oneof_group: typing.Literal["_dataflow_graph_id", b"_dataflow_graph_id"] + ) -> typing.Literal["dataflow_graph_id"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_dataset_name", b"_dataset_name"] - ) -> typing_extensions.Literal["dataset_name"] | None: ... + self, oneof_group: typing.Literal["_dataset_name", b"_dataset_name"] + ) -> typing.Literal["dataset_name"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_dataset_type", b"_dataset_type"] - ) -> typing_extensions.Literal["dataset_type"] | None: ... + self, oneof_group: typing.Literal["_dataset_type", b"_dataset_type"] + ) -> typing.Literal["dataset_type"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_format", b"_format"] - ) -> typing_extensions.Literal["format"] | None: ... + self, oneof_group: typing.Literal["_format", b"_format"] + ) -> typing.Literal["format"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_schema", b"_schema"] - ) -> typing_extensions.Literal["schema"] | None: ... + self, oneof_group: typing.Literal["_schema", b"_schema"] + ) -> typing.Literal["schema"] | None: ... 
+ @typing.final class DefineFlow(google.protobuf.message.Message): """Request to define a flow targeting a dataset.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor + @typing.final class SqlConfEntry(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -411,7 +418,7 @@ class PipelineCommand(google.protobuf.message.Message): value: builtins.str = ..., ) -> None: ... def ClearField( - self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"] + self, field_name: typing.Literal["key", b"key", "value", b"value"] ) -> None: ... DATAFLOW_GRAPH_ID_FIELD_NUMBER: builtins.int @@ -426,6 +433,8 @@ class PipelineCommand(google.protobuf.message.Message): """Name of the flow. For standalone flows, this must be a single-part name.""" target_dataset_name: builtins.str """Name of the dataset this flow writes to. Can be partially or fully qualified.""" + once: builtins.bool + """If true, this flow will only be run once per full refresh.""" @property def plan(self) -> pyspark.sql.connect.proto.relations_pb2.Relation: """An unresolved relation that defines the dataset's flow.""" @@ -434,8 +443,6 @@ class PipelineCommand(google.protobuf.message.Message): self, ) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]: """SQL configurations set when running this flow.""" - once: builtins.bool - """If true, this flow will only be run once per full refresh.""" def __init__( self, *, @@ -448,7 +455,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "_flow_name", @@ -473,7 +480,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> builtins.bool: ... def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "_flow_name", @@ -500,27 +507,95 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... @typing.overload def WhichOneof( - self, - oneof_group: typing_extensions.Literal["_dataflow_graph_id", b"_dataflow_graph_id"], - ) -> typing_extensions.Literal["dataflow_graph_id"] | None: ... + self, oneof_group: typing.Literal["_dataflow_graph_id", b"_dataflow_graph_id"] + ) -> typing.Literal["dataflow_graph_id"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_flow_name", b"_flow_name"] - ) -> typing_extensions.Literal["flow_name"] | None: ... + self, oneof_group: typing.Literal["_flow_name", b"_flow_name"] + ) -> typing.Literal["flow_name"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_once", b"_once"] - ) -> typing_extensions.Literal["once"] | None: ... + self, oneof_group: typing.Literal["_once", b"_once"] + ) -> typing.Literal["once"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_plan", b"_plan"] - ) -> typing_extensions.Literal["plan"] | None: ... + self, oneof_group: typing.Literal["_plan", b"_plan"] + ) -> typing.Literal["plan"] | None: ... @typing.overload def WhichOneof( + self, oneof_group: typing.Literal["_target_dataset_name", b"_target_dataset_name"] + ) -> typing.Literal["target_dataset_name"] | None: ... 
+ + @typing.final + class DefineSqlGraphElements(google.protobuf.message.Message): + """Parses the SQL file and registers all datasets and flows.""" + + DESCRIPTOR: google.protobuf.descriptor.Descriptor + + DATAFLOW_GRAPH_ID_FIELD_NUMBER: builtins.int + SQL_FILE_PATH_FIELD_NUMBER: builtins.int + SQL_TEXT_FIELD_NUMBER: builtins.int + dataflow_graph_id: builtins.str + """The graph to attach this dataset to.""" + sql_file_path: builtins.str + """The full path to the SQL file. Can be relative or absolute.""" + sql_text: builtins.str + """The contents of the SQL file.""" + def __init__( + self, + *, + dataflow_graph_id: builtins.str | None = ..., + sql_file_path: builtins.str | None = ..., + sql_text: builtins.str | None = ..., + ) -> None: ... + def HasField( self, - oneof_group: typing_extensions.Literal["_target_dataset_name", b"_target_dataset_name"], - ) -> typing_extensions.Literal["target_dataset_name"] | None: ... + field_name: typing.Literal[ + "_dataflow_graph_id", + b"_dataflow_graph_id", + "_sql_file_path", + b"_sql_file_path", + "_sql_text", + b"_sql_text", + "dataflow_graph_id", + b"dataflow_graph_id", + "sql_file_path", + b"sql_file_path", + "sql_text", + b"sql_text", + ], + ) -> builtins.bool: ... + def ClearField( + self, + field_name: typing.Literal[ + "_dataflow_graph_id", + b"_dataflow_graph_id", + "_sql_file_path", + b"_sql_file_path", + "_sql_text", + b"_sql_text", + "dataflow_graph_id", + b"dataflow_graph_id", + "sql_file_path", + b"sql_file_path", + "sql_text", + b"sql_text", + ], + ) -> None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["_dataflow_graph_id", b"_dataflow_graph_id"] + ) -> typing.Literal["dataflow_graph_id"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["_sql_file_path", b"_sql_file_path"] + ) -> typing.Literal["sql_file_path"] | None: ... + @typing.overload + def WhichOneof( + self, oneof_group: typing.Literal["_sql_text", b"_sql_text"] + ) -> typing.Literal["sql_text"] | None: ... + @typing.final class StartRun(google.protobuf.message.Message): """Resolves all datasets and flows and start a pipeline update. Should be called after all graph elements are registered. @@ -538,7 +613,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "dataflow_graph_id", @@ -547,7 +622,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> builtins.bool: ... def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "dataflow_graph_id", @@ -555,9 +630,8 @@ class PipelineCommand(google.protobuf.message.Message): ], ) -> None: ... def WhichOneof( - self, - oneof_group: typing_extensions.Literal["_dataflow_graph_id", b"_dataflow_graph_id"], - ) -> typing_extensions.Literal["dataflow_graph_id"] | None: ... + self, oneof_group: typing.Literal["_dataflow_graph_id", b"_dataflow_graph_id"] + ) -> typing.Literal["dataflow_graph_id"] | None: ... CREATE_DATAFLOW_GRAPH_FIELD_NUMBER: builtins.int DEFINE_DATASET_FIELD_NUMBER: builtins.int @@ -576,7 +650,7 @@ class PipelineCommand(google.protobuf.message.Message): @property def start_run(self) -> global___PipelineCommand.StartRun: ... @property - def define_sql_graph_elements(self) -> global___DefineSqlGraphElements: ... + def define_sql_graph_elements(self) -> global___PipelineCommand.DefineSqlGraphElements: ... 
def __init__( self, *, @@ -585,11 +659,11 @@ class PipelineCommand(google.protobuf.message.Message): define_flow: global___PipelineCommand.DefineFlow | None = ..., drop_dataflow_graph: global___PipelineCommand.DropDataflowGraph | None = ..., start_run: global___PipelineCommand.StartRun | None = ..., - define_sql_graph_elements: global___DefineSqlGraphElements | None = ..., + define_sql_graph_elements: global___PipelineCommand.DefineSqlGraphElements | None = ..., ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "command_type", b"command_type", "create_dataflow_graph", @@ -608,7 +682,7 @@ class PipelineCommand(google.protobuf.message.Message): ) -> builtins.bool: ... def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "command_type", b"command_type", "create_dataflow_graph", @@ -626,9 +700,9 @@ class PipelineCommand(google.protobuf.message.Message): ], ) -> None: ... def WhichOneof( - self, oneof_group: typing_extensions.Literal["command_type", b"command_type"] + self, oneof_group: typing.Literal["command_type", b"command_type"] ) -> ( - typing_extensions.Literal[ + typing.Literal[ "create_dataflow_graph", "define_dataset", "define_flow", @@ -641,81 +715,13 @@ class PipelineCommand(google.protobuf.message.Message): global___PipelineCommand = PipelineCommand -class DefineSqlGraphElements(google.protobuf.message.Message): - """Parses the SQL file and registers all datasets and flows.""" - - DESCRIPTOR: google.protobuf.descriptor.Descriptor - - DATAFLOW_GRAPH_ID_FIELD_NUMBER: builtins.int - SQL_FILE_PATH_FIELD_NUMBER: builtins.int - SQL_TEXT_FIELD_NUMBER: builtins.int - dataflow_graph_id: builtins.str - """The graph to attach this dataset to.""" - sql_file_path: builtins.str - """The full path to the SQL file. Can be relative or absolute.""" - sql_text: builtins.str - """The contents of the SQL file.""" - def __init__( - self, - *, - dataflow_graph_id: builtins.str | None = ..., - sql_file_path: builtins.str | None = ..., - sql_text: builtins.str | None = ..., - ) -> None: ... - def HasField( - self, - field_name: typing_extensions.Literal[ - "_dataflow_graph_id", - b"_dataflow_graph_id", - "_sql_file_path", - b"_sql_file_path", - "_sql_text", - b"_sql_text", - "dataflow_graph_id", - b"dataflow_graph_id", - "sql_file_path", - b"sql_file_path", - "sql_text", - b"sql_text", - ], - ) -> builtins.bool: ... - def ClearField( - self, - field_name: typing_extensions.Literal[ - "_dataflow_graph_id", - b"_dataflow_graph_id", - "_sql_file_path", - b"_sql_file_path", - "_sql_text", - b"_sql_text", - "dataflow_graph_id", - b"dataflow_graph_id", - "sql_file_path", - b"sql_file_path", - "sql_text", - b"sql_text", - ], - ) -> None: ... - @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_dataflow_graph_id", b"_dataflow_graph_id"] - ) -> typing_extensions.Literal["dataflow_graph_id"] | None: ... - @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_sql_file_path", b"_sql_file_path"] - ) -> typing_extensions.Literal["sql_file_path"] | None: ... - @typing.overload - def WhichOneof( - self, oneof_group: typing_extensions.Literal["_sql_text", b"_sql_text"] - ) -> typing_extensions.Literal["sql_text"] | None: ... 
- -global___DefineSqlGraphElements = DefineSqlGraphElements - +@typing.final class PipelineCommandResult(google.protobuf.message.Message): """Dispatch object for pipelines command results.""" DESCRIPTOR: google.protobuf.descriptor.Descriptor + @typing.final class CreateDataflowGraphResult(google.protobuf.message.Message): DESCRIPTOR: google.protobuf.descriptor.Descriptor @@ -729,7 +735,7 @@ class PipelineCommandResult(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "dataflow_graph_id", @@ -738,7 +744,7 @@ class PipelineCommandResult(google.protobuf.message.Message): ) -> builtins.bool: ... def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_dataflow_graph_id", b"_dataflow_graph_id", "dataflow_graph_id", @@ -746,9 +752,8 @@ class PipelineCommandResult(google.protobuf.message.Message): ], ) -> None: ... def WhichOneof( - self, - oneof_group: typing_extensions.Literal["_dataflow_graph_id", b"_dataflow_graph_id"], - ) -> typing_extensions.Literal["dataflow_graph_id"] | None: ... + self, oneof_group: typing.Literal["_dataflow_graph_id", b"_dataflow_graph_id"] + ) -> typing.Literal["dataflow_graph_id"] | None: ... CREATE_DATAFLOW_GRAPH_RESULT_FIELD_NUMBER: builtins.int @property @@ -763,7 +768,7 @@ class PipelineCommandResult(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "create_dataflow_graph_result", b"create_dataflow_graph_result", "result_type", @@ -772,7 +777,7 @@ class PipelineCommandResult(google.protobuf.message.Message): ) -> builtins.bool: ... def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "create_dataflow_graph_result", b"create_dataflow_graph_result", "result_type", @@ -780,11 +785,12 @@ class PipelineCommandResult(google.protobuf.message.Message): ], ) -> None: ... def WhichOneof( - self, oneof_group: typing_extensions.Literal["result_type", b"result_type"] - ) -> typing_extensions.Literal["create_dataflow_graph_result"] | None: ... + self, oneof_group: typing.Literal["result_type", b"result_type"] + ) -> typing.Literal["create_dataflow_graph_result"] | None: ... global___PipelineCommandResult = PipelineCommandResult +@typing.final class PipelineEventResult(google.protobuf.message.Message): """A response containing an event emitted during the run of a pipeline.""" @@ -798,13 +804,12 @@ class PipelineEventResult(google.protobuf.message.Message): *, event: global___PipelineEvent | None = ..., ) -> None: ... - def HasField( - self, field_name: typing_extensions.Literal["event", b"event"] - ) -> builtins.bool: ... - def ClearField(self, field_name: typing_extensions.Literal["event", b"event"]) -> None: ... + def HasField(self, field_name: typing.Literal["event", b"event"]) -> builtins.bool: ... + def ClearField(self, field_name: typing.Literal["event", b"event"]) -> None: ... global___PipelineEventResult = PipelineEventResult +@typing.final class PipelineEvent(google.protobuf.message.Message): """An event emitted during the run of a graph.""" @@ -824,7 +829,7 @@ class PipelineEvent(google.protobuf.message.Message): ) -> None: ... def HasField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_message", b"_message", "_timestamp", @@ -837,7 +842,7 @@ class PipelineEvent(google.protobuf.message.Message): ) -> builtins.bool: ... 
def ClearField( self, - field_name: typing_extensions.Literal[ + field_name: typing.Literal[ "_message", b"_message", "_timestamp", @@ -850,11 +855,11 @@ class PipelineEvent(google.protobuf.message.Message): ) -> None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_message", b"_message"] - ) -> typing_extensions.Literal["message"] | None: ... + self, oneof_group: typing.Literal["_message", b"_message"] + ) -> typing.Literal["message"] | None: ... @typing.overload def WhichOneof( - self, oneof_group: typing_extensions.Literal["_timestamp", b"_timestamp"] - ) -> typing_extensions.Literal["timestamp"] | None: ... + self, oneof_group: typing.Literal["_timestamp", b"_timestamp"] + ) -> typing.Literal["timestamp"] | None: ... global___PipelineEvent = PipelineEvent diff --git a/sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto b/sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto index ea94f9b03bfc8..f0e72b697f253 100644 --- a/sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto +++ b/sql/connect/common/src/main/protobuf/spark/connect/pipelines.proto @@ -109,6 +109,18 @@ message PipelineCommand { optional bool once = 6; } + // Parses the SQL file and registers all datasets and flows. + message DefineSqlGraphElements { + // The graph to attach this dataset to. + optional string dataflow_graph_id = 1; + + // The full path to the SQL file. Can be relative or absolute. + optional string sql_file_path = 2; + + // The contents of the SQL file. + optional string sql_text = 3; + } + // Resolves all datasets and flows and start a pipeline update. Should be called after all // graph elements are registered. message StartRun { @@ -117,18 +129,6 @@ message PipelineCommand { } } -// Parses the SQL file and registers all datasets and flows. -message DefineSqlGraphElements { - // The graph to attach this dataset to. - optional string dataflow_graph_id = 1; - - // The full path to the SQL file. Can be relative or absolute. - optional string sql_file_path = 2; - - // The contents of the SQL file. - optional string sql_text = 3; -} - // Dispatch object for pipelines command results. message PipelineCommandResult { oneof result_type {
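---

For reviewers, a minimal sketch (not part of this diff) of the client-facing effect: `DefineSqlGraphElements` is now a message nested inside `PipelineCommand`, so it is constructed as `PipelineCommand.DefineSqlGraphElements` rather than as a top-level message, while the `command_type` oneof dispatch is unchanged. The graph id, file path, and SQL text below are placeholder values, not taken from this patch.

```python
from pyspark.sql.connect.proto import pipelines_pb2

# Build the command using the new nested message type. All three fields are
# `optional` in the proto, so each has explicit presence.
cmd = pipelines_pb2.PipelineCommand(
    define_sql_graph_elements=pipelines_pb2.PipelineCommand.DefineSqlGraphElements(
        dataflow_graph_id="example-graph-id",      # placeholder; a real id comes from CreateDataflowGraph
        sql_file_path="definitions/pipeline.sql",  # placeholder path
        sql_text="CREATE MATERIALIZED VIEW mv AS SELECT 1 AS id",
    )
)

# The oneof group is still reported under the same field name as before the move.
assert cmd.WhichOneof("command_type") == "define_sql_graph_elements"
```

Since the oneof field number (`6`) and field name are untouched, the move is wire-compatible; only generated symbol names change (`global___DefineSqlGraphElements` becomes `global___PipelineCommand.DefineSqlGraphElements` in the stubs), which is why the serialized descriptor offsets in `pipelines_pb2.py` shift but no client bytes on the wire do.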