diff --git a/api/services/control/control.pb.go b/api/services/control/control.pb.go index 2703e042ae02..e31181a05696 100644 --- a/api/services/control/control.pb.go +++ b/api/services/control/control.pb.go @@ -642,7 +642,10 @@ type CacheOptionsEntry struct { Type string `protobuf:"bytes,1,opt,name=Type,proto3" json:"Type,omitempty"` // Attrs are like mode=(min,max), ref=example.com:5000/foo/bar . // See cache importer/exporter implementations' documentation. - Attrs map[string]string `protobuf:"bytes,2,rep,name=Attrs,proto3" json:"Attrs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Attrs map[string]string `protobuf:"bytes,2,rep,name=Attrs,proto3" json:"Attrs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // ID identifies this exporter. + // ID should be treated by the exporter as opaque. + ID string `protobuf:"bytes,3,opt,name=ID,proto3" json:"ID,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -691,11 +694,24 @@ func (x *CacheOptionsEntry) GetAttrs() map[string]string { return nil } +func (x *CacheOptionsEntry) GetID() string { + if x != nil { + return x.ID + } + return "" +} + type SolveResponse struct { - state protoimpl.MessageState `protogen:"open.v1"` - ExporterResponse map[string]string `protobuf:"bytes,1,rep,name=ExporterResponse,proto3" json:"ExporterResponse,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` - unknownFields protoimpl.UnknownFields - sizeCache protoimpl.SizeCache + state protoimpl.MessageState `protogen:"open.v1"` + // ExporterResponseDeprecated is a combined exporter response - it aggregates + // responses from all exporters (including cache exporters) running in parallel. + // It is deprecated in favor of the structured exporter responses but will be + // populated as long as it is supported. 
+ ExporterResponseDeprecated map[string]string `protobuf:"bytes,1,rep,name=ExporterResponseDeprecated,proto3" json:"ExporterResponseDeprecated,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + ExporterResponses []*ExporterResponse `protobuf:"bytes,2,rep,name=exporterResponses,proto3" json:"exporterResponses,omitempty"` + CacheExporterResponses []*ExporterResponse `protobuf:"bytes,3,rep,name=cacheExporterResponses,proto3" json:"cacheExporterResponses,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache } func (x *SolveResponse) Reset() { @@ -728,9 +744,23 @@ func (*SolveResponse) Descriptor() ([]byte, []int) { return file_github_com_moby_buildkit_api_services_control_control_proto_rawDescGZIP(), []int{7} } -func (x *SolveResponse) GetExporterResponse() map[string]string { +func (x *SolveResponse) GetExporterResponseDeprecated() map[string]string { if x != nil { - return x.ExporterResponse + return x.ExporterResponseDeprecated + } + return nil +} + +func (x *SolveResponse) GetExporterResponses() []*ExporterResponse { + if x != nil { + return x.ExporterResponses + } + return nil +} + +func (x *SolveResponse) GetCacheExporterResponses() []*ExporterResponse { + if x != nil { + return x.CacheExporterResponses } return nil } @@ -1973,7 +2003,10 @@ type Exporter struct { // Type identifies the exporter Type string `protobuf:"bytes,1,opt,name=Type,proto3" json:"Type,omitempty"` // Attrs specifies exporter configuration - Attrs map[string]string `protobuf:"bytes,2,rep,name=Attrs,proto3" json:"Attrs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + Attrs map[string]string `protobuf:"bytes,2,rep,name=Attrs,proto3" json:"Attrs,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + // ID identifies this exporter. + // ID should be treated by the exporter as opaque. 
+ ID string `protobuf:"bytes,3,opt,name=ID,proto3" json:"ID,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } @@ -2022,6 +2055,123 @@ func (x *Exporter) GetAttrs() map[string]string { return nil } +func (x *Exporter) GetID() string { + if x != nil { + return x.ID + } + return "" +} + +// ExporterResponse describes the output of an exporter +type ExporterResponse struct { + state protoimpl.MessageState `protogen:"open.v1"` + // Metadata describes the exporter + Metadata *ExporterMetadata `protobuf:"bytes,1,opt,name=metadata,proto3" json:"metadata,omitempty"` + // Data is the exporter's output + Data map[string]string `protobuf:"bytes,2,rep,name=data,proto3" json:"data,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExporterResponse) Reset() { + *x = ExporterResponse{} + mi := &file_github_com_moby_buildkit_api_services_control_control_proto_msgTypes[27] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExporterResponse) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExporterResponse) ProtoMessage() {} + +func (x *ExporterResponse) ProtoReflect() protoreflect.Message { + mi := &file_github_com_moby_buildkit_api_services_control_control_proto_msgTypes[27] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExporterResponse.ProtoReflect.Descriptor instead. 
+func (*ExporterResponse) Descriptor() ([]byte, []int) { + return file_github_com_moby_buildkit_api_services_control_control_proto_rawDescGZIP(), []int{27} +} + +func (x *ExporterResponse) GetMetadata() *ExporterMetadata { + if x != nil { + return x.Metadata + } + return nil +} + +func (x *ExporterResponse) GetData() map[string]string { + if x != nil { + return x.Data + } + return nil +} + +// ExporterMetadata describes the output exporter +type ExporterMetadata struct { + state protoimpl.MessageState `protogen:"open.v1"` + // ID identifies the exporter + ID string `protobuf:"bytes,1,opt,name=ID,proto3" json:"ID,omitempty"` + // Type identifies the exporter type + Type string `protobuf:"bytes,2,opt,name=type,proto3" json:"type,omitempty"` + unknownFields protoimpl.UnknownFields + sizeCache protoimpl.SizeCache +} + +func (x *ExporterMetadata) Reset() { + *x = ExporterMetadata{} + mi := &file_github_com_moby_buildkit_api_services_control_control_proto_msgTypes[28] + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + ms.StoreMessageInfo(mi) +} + +func (x *ExporterMetadata) String() string { + return protoimpl.X.MessageStringOf(x) +} + +func (*ExporterMetadata) ProtoMessage() {} + +func (x *ExporterMetadata) ProtoReflect() protoreflect.Message { + mi := &file_github_com_moby_buildkit_api_services_control_control_proto_msgTypes[28] + if x != nil { + ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) + if ms.LoadMessageInfo() == nil { + ms.StoreMessageInfo(mi) + } + return ms + } + return mi.MessageOf(x) +} + +// Deprecated: Use ExporterMetadata.ProtoReflect.Descriptor instead. 
+func (*ExporterMetadata) Descriptor() ([]byte, []int) { + return file_github_com_moby_buildkit_api_services_control_control_proto_rawDescGZIP(), []int{28} +} + +func (x *ExporterMetadata) GetID() string { + if x != nil { + return x.ID + } + return "" +} + +func (x *ExporterMetadata) GetType() string { + if x != nil { + return x.Type + } + return "" +} + var File_github_com_moby_buildkit_api_services_control_control_proto protoreflect.FileDescriptor const file_github_com_moby_buildkit_api_services_control_control_proto_rawDesc = "" + @@ -2095,17 +2245,20 @@ const file_github_com_moby_buildkit_api_services_control_control_proto_rawDesc = "\aImports\x18\x05 \x03(\v2#.moby.buildkit.v1.CacheOptionsEntryR\aImports\x1aH\n" + "\x1aExportAttrsDeprecatedEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + - "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\xa7\x01\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\xb7\x01\n" + "\x11CacheOptionsEntry\x12\x12\n" + "\x04Type\x18\x01 \x01(\tR\x04Type\x12D\n" + - "\x05Attrs\x18\x02 \x03(\v2..moby.buildkit.v1.CacheOptionsEntry.AttrsEntryR\x05Attrs\x1a8\n" + + "\x05Attrs\x18\x02 \x03(\v2..moby.buildkit.v1.CacheOptionsEntry.AttrsEntryR\x05Attrs\x12\x0e\n" + + "\x02ID\x18\x03 \x01(\tR\x02ID\x1a8\n" + "\n" + "AttrsEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + - "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\xb7\x01\n" + - "\rSolveResponse\x12a\n" + - "\x10ExporterResponse\x18\x01 \x03(\v25.moby.buildkit.v1.SolveResponse.ExporterResponseEntryR\x10ExporterResponse\x1aC\n" + - "\x15ExporterResponseEntry\x12\x10\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\x8d\x03\n" + + "\rSolveResponse\x12\x7f\n" + + "\x1aExporterResponseDeprecated\x18\x01 \x03(\v2?.moby.buildkit.v1.SolveResponse.ExporterResponseDeprecatedEntryR\x1aExporterResponseDeprecated\x12P\n" + + "\x11exporterResponses\x18\x02 \x03(\v2\".moby.buildkit.v1.ExporterResponseR\x11exporterResponses\x12Z\n" + + 
"\x16cacheExporterResponses\x18\x03 \x03(\v2\".moby.buildkit.v1.ExporterResponseR\x16cacheExporterResponses\x1aM\n" + + "\x1fExporterResponseDeprecatedEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"!\n" + "\rStatusRequest\x12\x10\n" + @@ -2220,14 +2373,24 @@ const file_github_com_moby_buildkit_api_services_control_control_proto_rawDesc = "\aResults\x18\x03 \x03(\v2..moby.buildkit.v1.BuildResultInfo.ResultsEntryR\aResults\x1aX\n" + "\fResultsEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\x03R\x03key\x122\n" + - "\x05value\x18\x02 \x01(\v2\x1c.moby.buildkit.v1.DescriptorR\x05value:\x028\x01\"\x95\x01\n" + + "\x05value\x18\x02 \x01(\v2\x1c.moby.buildkit.v1.DescriptorR\x05value:\x028\x01\"\xa5\x01\n" + "\bExporter\x12\x12\n" + "\x04Type\x18\x01 \x01(\tR\x04Type\x12;\n" + - "\x05Attrs\x18\x02 \x03(\v2%.moby.buildkit.v1.Exporter.AttrsEntryR\x05Attrs\x1a8\n" + + "\x05Attrs\x18\x02 \x03(\v2%.moby.buildkit.v1.Exporter.AttrsEntryR\x05Attrs\x12\x0e\n" + + "\x02ID\x18\x03 \x01(\tR\x02ID\x1a8\n" + "\n" + "AttrsEntry\x12\x10\n" + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + - "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01*?\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"\xcd\x01\n" + + "\x10ExporterResponse\x12>\n" + + "\bmetadata\x18\x01 \x01(\v2\".moby.buildkit.v1.ExporterMetadataR\bmetadata\x12@\n" + + "\x04data\x18\x02 \x03(\v2,.moby.buildkit.v1.ExporterResponse.DataEntryR\x04data\x1a7\n" + + "\tDataEntry\x12\x10\n" + + "\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n" + + "\x05value\x18\x02 \x01(\tR\x05value:\x028\x01\"6\n" + + "\x10ExporterMetadata\x12\x0e\n" + + "\x02ID\x18\x01 \x01(\tR\x02ID\x12\x12\n" + + "\x04type\x18\x02 \x01(\tR\x04type*?\n" + "\x15BuildHistoryEventType\x12\v\n" + "\aSTARTED\x10\x00\x12\f\n" + "\bCOMPLETE\x10\x01\x12\v\n" + @@ -2256,7 +2419,7 @@ func file_github_com_moby_buildkit_api_services_control_control_proto_rawDescGZI } var 
file_github_com_moby_buildkit_api_services_control_control_proto_enumTypes = make([]protoimpl.EnumInfo, 1) -var file_github_com_moby_buildkit_api_services_control_control_proto_msgTypes = make([]protoimpl.MessageInfo, 39) +var file_github_com_moby_buildkit_api_services_control_control_proto_msgTypes = make([]protoimpl.MessageInfo, 42) var file_github_com_moby_buildkit_api_services_control_control_proto_goTypes = []any{ (BuildHistoryEventType)(0), // 0: moby.buildkit.v1.BuildHistoryEventType (*PruneRequest)(nil), // 1: moby.buildkit.v1.PruneRequest @@ -2286,103 +2449,110 @@ var file_github_com_moby_buildkit_api_services_control_control_proto_goTypes = [ (*Descriptor)(nil), // 25: moby.buildkit.v1.Descriptor (*BuildResultInfo)(nil), // 26: moby.buildkit.v1.BuildResultInfo (*Exporter)(nil), // 27: moby.buildkit.v1.Exporter - nil, // 28: moby.buildkit.v1.SolveRequest.ExporterAttrsDeprecatedEntry - nil, // 29: moby.buildkit.v1.SolveRequest.FrontendAttrsEntry - nil, // 30: moby.buildkit.v1.SolveRequest.FrontendInputsEntry - nil, // 31: moby.buildkit.v1.CacheOptions.ExportAttrsDeprecatedEntry - nil, // 32: moby.buildkit.v1.CacheOptionsEntry.AttrsEntry - nil, // 33: moby.buildkit.v1.SolveResponse.ExporterResponseEntry - nil, // 34: moby.buildkit.v1.BuildHistoryRecord.FrontendAttrsEntry - nil, // 35: moby.buildkit.v1.BuildHistoryRecord.ExporterResponseEntry - nil, // 36: moby.buildkit.v1.BuildHistoryRecord.ResultsEntry - nil, // 37: moby.buildkit.v1.Descriptor.AnnotationsEntry - nil, // 38: moby.buildkit.v1.BuildResultInfo.ResultsEntry - nil, // 39: moby.buildkit.v1.Exporter.AttrsEntry - (*timestamp.Timestamp)(nil), // 40: google.protobuf.Timestamp - (*pb.Definition)(nil), // 41: pb.Definition - (*pb1.Policy)(nil), // 42: moby.buildkit.v1.sourcepolicy.Policy - (*pb.ProgressGroup)(nil), // 43: pb.ProgressGroup - (*pb.SourceInfo)(nil), // 44: pb.SourceInfo - (*pb.Range)(nil), // 45: pb.Range - (*types.WorkerRecord)(nil), // 46: moby.buildkit.v1.types.WorkerRecord - 
(*types.BuildkitVersion)(nil), // 47: moby.buildkit.v1.types.BuildkitVersion - (*status.Status)(nil), // 48: google.rpc.Status + (*ExporterResponse)(nil), // 28: moby.buildkit.v1.ExporterResponse + (*ExporterMetadata)(nil), // 29: moby.buildkit.v1.ExporterMetadata + nil, // 30: moby.buildkit.v1.SolveRequest.ExporterAttrsDeprecatedEntry + nil, // 31: moby.buildkit.v1.SolveRequest.FrontendAttrsEntry + nil, // 32: moby.buildkit.v1.SolveRequest.FrontendInputsEntry + nil, // 33: moby.buildkit.v1.CacheOptions.ExportAttrsDeprecatedEntry + nil, // 34: moby.buildkit.v1.CacheOptionsEntry.AttrsEntry + nil, // 35: moby.buildkit.v1.SolveResponse.ExporterResponseDeprecatedEntry + nil, // 36: moby.buildkit.v1.BuildHistoryRecord.FrontendAttrsEntry + nil, // 37: moby.buildkit.v1.BuildHistoryRecord.ExporterResponseEntry + nil, // 38: moby.buildkit.v1.BuildHistoryRecord.ResultsEntry + nil, // 39: moby.buildkit.v1.Descriptor.AnnotationsEntry + nil, // 40: moby.buildkit.v1.BuildResultInfo.ResultsEntry + nil, // 41: moby.buildkit.v1.Exporter.AttrsEntry + nil, // 42: moby.buildkit.v1.ExporterResponse.DataEntry + (*timestamp.Timestamp)(nil), // 43: google.protobuf.Timestamp + (*pb.Definition)(nil), // 44: pb.Definition + (*pb1.Policy)(nil), // 45: moby.buildkit.v1.sourcepolicy.Policy + (*pb.ProgressGroup)(nil), // 46: pb.ProgressGroup + (*pb.SourceInfo)(nil), // 47: pb.SourceInfo + (*pb.Range)(nil), // 48: pb.Range + (*types.WorkerRecord)(nil), // 49: moby.buildkit.v1.types.WorkerRecord + (*types.BuildkitVersion)(nil), // 50: moby.buildkit.v1.types.BuildkitVersion + (*status.Status)(nil), // 51: google.rpc.Status } var file_github_com_moby_buildkit_api_services_control_control_proto_depIdxs = []int32{ 4, // 0: moby.buildkit.v1.DiskUsageResponse.record:type_name -> moby.buildkit.v1.UsageRecord - 40, // 1: moby.buildkit.v1.UsageRecord.CreatedAt:type_name -> google.protobuf.Timestamp - 40, // 2: moby.buildkit.v1.UsageRecord.LastUsedAt:type_name -> google.protobuf.Timestamp - 41, // 3: 
moby.buildkit.v1.SolveRequest.Definition:type_name -> pb.Definition - 28, // 4: moby.buildkit.v1.SolveRequest.ExporterAttrsDeprecated:type_name -> moby.buildkit.v1.SolveRequest.ExporterAttrsDeprecatedEntry - 29, // 5: moby.buildkit.v1.SolveRequest.FrontendAttrs:type_name -> moby.buildkit.v1.SolveRequest.FrontendAttrsEntry + 43, // 1: moby.buildkit.v1.UsageRecord.CreatedAt:type_name -> google.protobuf.Timestamp + 43, // 2: moby.buildkit.v1.UsageRecord.LastUsedAt:type_name -> google.protobuf.Timestamp + 44, // 3: moby.buildkit.v1.SolveRequest.Definition:type_name -> pb.Definition + 30, // 4: moby.buildkit.v1.SolveRequest.ExporterAttrsDeprecated:type_name -> moby.buildkit.v1.SolveRequest.ExporterAttrsDeprecatedEntry + 31, // 5: moby.buildkit.v1.SolveRequest.FrontendAttrs:type_name -> moby.buildkit.v1.SolveRequest.FrontendAttrsEntry 6, // 6: moby.buildkit.v1.SolveRequest.Cache:type_name -> moby.buildkit.v1.CacheOptions - 30, // 7: moby.buildkit.v1.SolveRequest.FrontendInputs:type_name -> moby.buildkit.v1.SolveRequest.FrontendInputsEntry - 42, // 8: moby.buildkit.v1.SolveRequest.SourcePolicy:type_name -> moby.buildkit.v1.sourcepolicy.Policy + 32, // 7: moby.buildkit.v1.SolveRequest.FrontendInputs:type_name -> moby.buildkit.v1.SolveRequest.FrontendInputsEntry + 45, // 8: moby.buildkit.v1.SolveRequest.SourcePolicy:type_name -> moby.buildkit.v1.sourcepolicy.Policy 27, // 9: moby.buildkit.v1.SolveRequest.Exporters:type_name -> moby.buildkit.v1.Exporter - 31, // 10: moby.buildkit.v1.CacheOptions.ExportAttrsDeprecated:type_name -> moby.buildkit.v1.CacheOptions.ExportAttrsDeprecatedEntry + 33, // 10: moby.buildkit.v1.CacheOptions.ExportAttrsDeprecated:type_name -> moby.buildkit.v1.CacheOptions.ExportAttrsDeprecatedEntry 7, // 11: moby.buildkit.v1.CacheOptions.Exports:type_name -> moby.buildkit.v1.CacheOptionsEntry 7, // 12: moby.buildkit.v1.CacheOptions.Imports:type_name -> moby.buildkit.v1.CacheOptionsEntry - 32, // 13: moby.buildkit.v1.CacheOptionsEntry.Attrs:type_name -> 
moby.buildkit.v1.CacheOptionsEntry.AttrsEntry - 33, // 14: moby.buildkit.v1.SolveResponse.ExporterResponse:type_name -> moby.buildkit.v1.SolveResponse.ExporterResponseEntry - 11, // 15: moby.buildkit.v1.StatusResponse.vertexes:type_name -> moby.buildkit.v1.Vertex - 12, // 16: moby.buildkit.v1.StatusResponse.statuses:type_name -> moby.buildkit.v1.VertexStatus - 13, // 17: moby.buildkit.v1.StatusResponse.logs:type_name -> moby.buildkit.v1.VertexLog - 14, // 18: moby.buildkit.v1.StatusResponse.warnings:type_name -> moby.buildkit.v1.VertexWarning - 40, // 19: moby.buildkit.v1.Vertex.started:type_name -> google.protobuf.Timestamp - 40, // 20: moby.buildkit.v1.Vertex.completed:type_name -> google.protobuf.Timestamp - 43, // 21: moby.buildkit.v1.Vertex.progressGroup:type_name -> pb.ProgressGroup - 40, // 22: moby.buildkit.v1.VertexStatus.timestamp:type_name -> google.protobuf.Timestamp - 40, // 23: moby.buildkit.v1.VertexStatus.started:type_name -> google.protobuf.Timestamp - 40, // 24: moby.buildkit.v1.VertexStatus.completed:type_name -> google.protobuf.Timestamp - 40, // 25: moby.buildkit.v1.VertexLog.timestamp:type_name -> google.protobuf.Timestamp - 44, // 26: moby.buildkit.v1.VertexWarning.info:type_name -> pb.SourceInfo - 45, // 27: moby.buildkit.v1.VertexWarning.ranges:type_name -> pb.Range - 46, // 28: moby.buildkit.v1.ListWorkersResponse.record:type_name -> moby.buildkit.v1.types.WorkerRecord - 47, // 29: moby.buildkit.v1.InfoResponse.buildkitVersion:type_name -> moby.buildkit.v1.types.BuildkitVersion - 0, // 30: moby.buildkit.v1.BuildHistoryEvent.type:type_name -> moby.buildkit.v1.BuildHistoryEventType - 22, // 31: moby.buildkit.v1.BuildHistoryEvent.record:type_name -> moby.buildkit.v1.BuildHistoryRecord - 34, // 32: moby.buildkit.v1.BuildHistoryRecord.FrontendAttrs:type_name -> moby.buildkit.v1.BuildHistoryRecord.FrontendAttrsEntry - 27, // 33: moby.buildkit.v1.BuildHistoryRecord.Exporters:type_name -> moby.buildkit.v1.Exporter - 48, // 34: 
moby.buildkit.v1.BuildHistoryRecord.error:type_name -> google.rpc.Status - 40, // 35: moby.buildkit.v1.BuildHistoryRecord.CreatedAt:type_name -> google.protobuf.Timestamp - 40, // 36: moby.buildkit.v1.BuildHistoryRecord.CompletedAt:type_name -> google.protobuf.Timestamp - 25, // 37: moby.buildkit.v1.BuildHistoryRecord.logs:type_name -> moby.buildkit.v1.Descriptor - 35, // 38: moby.buildkit.v1.BuildHistoryRecord.ExporterResponse:type_name -> moby.buildkit.v1.BuildHistoryRecord.ExporterResponseEntry - 26, // 39: moby.buildkit.v1.BuildHistoryRecord.Result:type_name -> moby.buildkit.v1.BuildResultInfo - 36, // 40: moby.buildkit.v1.BuildHistoryRecord.Results:type_name -> moby.buildkit.v1.BuildHistoryRecord.ResultsEntry - 25, // 41: moby.buildkit.v1.BuildHistoryRecord.trace:type_name -> moby.buildkit.v1.Descriptor - 25, // 42: moby.buildkit.v1.BuildHistoryRecord.externalError:type_name -> moby.buildkit.v1.Descriptor - 37, // 43: moby.buildkit.v1.Descriptor.annotations:type_name -> moby.buildkit.v1.Descriptor.AnnotationsEntry - 25, // 44: moby.buildkit.v1.BuildResultInfo.ResultDeprecated:type_name -> moby.buildkit.v1.Descriptor - 25, // 45: moby.buildkit.v1.BuildResultInfo.Attestations:type_name -> moby.buildkit.v1.Descriptor - 38, // 46: moby.buildkit.v1.BuildResultInfo.Results:type_name -> moby.buildkit.v1.BuildResultInfo.ResultsEntry - 39, // 47: moby.buildkit.v1.Exporter.Attrs:type_name -> moby.buildkit.v1.Exporter.AttrsEntry - 41, // 48: moby.buildkit.v1.SolveRequest.FrontendInputsEntry.value:type_name -> pb.Definition - 26, // 49: moby.buildkit.v1.BuildHistoryRecord.ResultsEntry.value:type_name -> moby.buildkit.v1.BuildResultInfo - 25, // 50: moby.buildkit.v1.BuildResultInfo.ResultsEntry.value:type_name -> moby.buildkit.v1.Descriptor - 2, // 51: moby.buildkit.v1.Control.DiskUsage:input_type -> moby.buildkit.v1.DiskUsageRequest - 1, // 52: moby.buildkit.v1.Control.Prune:input_type -> moby.buildkit.v1.PruneRequest - 5, // 53: moby.buildkit.v1.Control.Solve:input_type 
-> moby.buildkit.v1.SolveRequest - 9, // 54: moby.buildkit.v1.Control.Status:input_type -> moby.buildkit.v1.StatusRequest - 15, // 55: moby.buildkit.v1.Control.Session:input_type -> moby.buildkit.v1.BytesMessage - 16, // 56: moby.buildkit.v1.Control.ListWorkers:input_type -> moby.buildkit.v1.ListWorkersRequest - 18, // 57: moby.buildkit.v1.Control.Info:input_type -> moby.buildkit.v1.InfoRequest - 20, // 58: moby.buildkit.v1.Control.ListenBuildHistory:input_type -> moby.buildkit.v1.BuildHistoryRequest - 23, // 59: moby.buildkit.v1.Control.UpdateBuildHistory:input_type -> moby.buildkit.v1.UpdateBuildHistoryRequest - 3, // 60: moby.buildkit.v1.Control.DiskUsage:output_type -> moby.buildkit.v1.DiskUsageResponse - 4, // 61: moby.buildkit.v1.Control.Prune:output_type -> moby.buildkit.v1.UsageRecord - 8, // 62: moby.buildkit.v1.Control.Solve:output_type -> moby.buildkit.v1.SolveResponse - 10, // 63: moby.buildkit.v1.Control.Status:output_type -> moby.buildkit.v1.StatusResponse - 15, // 64: moby.buildkit.v1.Control.Session:output_type -> moby.buildkit.v1.BytesMessage - 17, // 65: moby.buildkit.v1.Control.ListWorkers:output_type -> moby.buildkit.v1.ListWorkersResponse - 19, // 66: moby.buildkit.v1.Control.Info:output_type -> moby.buildkit.v1.InfoResponse - 21, // 67: moby.buildkit.v1.Control.ListenBuildHistory:output_type -> moby.buildkit.v1.BuildHistoryEvent - 24, // 68: moby.buildkit.v1.Control.UpdateBuildHistory:output_type -> moby.buildkit.v1.UpdateBuildHistoryResponse - 60, // [60:69] is the sub-list for method output_type - 51, // [51:60] is the sub-list for method input_type - 51, // [51:51] is the sub-list for extension type_name - 51, // [51:51] is the sub-list for extension extendee - 0, // [0:51] is the sub-list for field type_name + 34, // 13: moby.buildkit.v1.CacheOptionsEntry.Attrs:type_name -> moby.buildkit.v1.CacheOptionsEntry.AttrsEntry + 35, // 14: moby.buildkit.v1.SolveResponse.ExporterResponseDeprecated:type_name -> 
moby.buildkit.v1.SolveResponse.ExporterResponseDeprecatedEntry + 28, // 15: moby.buildkit.v1.SolveResponse.exporterResponses:type_name -> moby.buildkit.v1.ExporterResponse + 28, // 16: moby.buildkit.v1.SolveResponse.cacheExporterResponses:type_name -> moby.buildkit.v1.ExporterResponse + 11, // 17: moby.buildkit.v1.StatusResponse.vertexes:type_name -> moby.buildkit.v1.Vertex + 12, // 18: moby.buildkit.v1.StatusResponse.statuses:type_name -> moby.buildkit.v1.VertexStatus + 13, // 19: moby.buildkit.v1.StatusResponse.logs:type_name -> moby.buildkit.v1.VertexLog + 14, // 20: moby.buildkit.v1.StatusResponse.warnings:type_name -> moby.buildkit.v1.VertexWarning + 43, // 21: moby.buildkit.v1.Vertex.started:type_name -> google.protobuf.Timestamp + 43, // 22: moby.buildkit.v1.Vertex.completed:type_name -> google.protobuf.Timestamp + 46, // 23: moby.buildkit.v1.Vertex.progressGroup:type_name -> pb.ProgressGroup + 43, // 24: moby.buildkit.v1.VertexStatus.timestamp:type_name -> google.protobuf.Timestamp + 43, // 25: moby.buildkit.v1.VertexStatus.started:type_name -> google.protobuf.Timestamp + 43, // 26: moby.buildkit.v1.VertexStatus.completed:type_name -> google.protobuf.Timestamp + 43, // 27: moby.buildkit.v1.VertexLog.timestamp:type_name -> google.protobuf.Timestamp + 47, // 28: moby.buildkit.v1.VertexWarning.info:type_name -> pb.SourceInfo + 48, // 29: moby.buildkit.v1.VertexWarning.ranges:type_name -> pb.Range + 49, // 30: moby.buildkit.v1.ListWorkersResponse.record:type_name -> moby.buildkit.v1.types.WorkerRecord + 50, // 31: moby.buildkit.v1.InfoResponse.buildkitVersion:type_name -> moby.buildkit.v1.types.BuildkitVersion + 0, // 32: moby.buildkit.v1.BuildHistoryEvent.type:type_name -> moby.buildkit.v1.BuildHistoryEventType + 22, // 33: moby.buildkit.v1.BuildHistoryEvent.record:type_name -> moby.buildkit.v1.BuildHistoryRecord + 36, // 34: moby.buildkit.v1.BuildHistoryRecord.FrontendAttrs:type_name -> moby.buildkit.v1.BuildHistoryRecord.FrontendAttrsEntry + 27, // 35: 
moby.buildkit.v1.BuildHistoryRecord.Exporters:type_name -> moby.buildkit.v1.Exporter + 51, // 36: moby.buildkit.v1.BuildHistoryRecord.error:type_name -> google.rpc.Status + 43, // 37: moby.buildkit.v1.BuildHistoryRecord.CreatedAt:type_name -> google.protobuf.Timestamp + 43, // 38: moby.buildkit.v1.BuildHistoryRecord.CompletedAt:type_name -> google.protobuf.Timestamp + 25, // 39: moby.buildkit.v1.BuildHistoryRecord.logs:type_name -> moby.buildkit.v1.Descriptor + 37, // 40: moby.buildkit.v1.BuildHistoryRecord.ExporterResponse:type_name -> moby.buildkit.v1.BuildHistoryRecord.ExporterResponseEntry + 26, // 41: moby.buildkit.v1.BuildHistoryRecord.Result:type_name -> moby.buildkit.v1.BuildResultInfo + 38, // 42: moby.buildkit.v1.BuildHistoryRecord.Results:type_name -> moby.buildkit.v1.BuildHistoryRecord.ResultsEntry + 25, // 43: moby.buildkit.v1.BuildHistoryRecord.trace:type_name -> moby.buildkit.v1.Descriptor + 25, // 44: moby.buildkit.v1.BuildHistoryRecord.externalError:type_name -> moby.buildkit.v1.Descriptor + 39, // 45: moby.buildkit.v1.Descriptor.annotations:type_name -> moby.buildkit.v1.Descriptor.AnnotationsEntry + 25, // 46: moby.buildkit.v1.BuildResultInfo.ResultDeprecated:type_name -> moby.buildkit.v1.Descriptor + 25, // 47: moby.buildkit.v1.BuildResultInfo.Attestations:type_name -> moby.buildkit.v1.Descriptor + 40, // 48: moby.buildkit.v1.BuildResultInfo.Results:type_name -> moby.buildkit.v1.BuildResultInfo.ResultsEntry + 41, // 49: moby.buildkit.v1.Exporter.Attrs:type_name -> moby.buildkit.v1.Exporter.AttrsEntry + 29, // 50: moby.buildkit.v1.ExporterResponse.metadata:type_name -> moby.buildkit.v1.ExporterMetadata + 42, // 51: moby.buildkit.v1.ExporterResponse.data:type_name -> moby.buildkit.v1.ExporterResponse.DataEntry + 44, // 52: moby.buildkit.v1.SolveRequest.FrontendInputsEntry.value:type_name -> pb.Definition + 26, // 53: moby.buildkit.v1.BuildHistoryRecord.ResultsEntry.value:type_name -> moby.buildkit.v1.BuildResultInfo + 25, // 54: 
moby.buildkit.v1.BuildResultInfo.ResultsEntry.value:type_name -> moby.buildkit.v1.Descriptor + 2, // 55: moby.buildkit.v1.Control.DiskUsage:input_type -> moby.buildkit.v1.DiskUsageRequest + 1, // 56: moby.buildkit.v1.Control.Prune:input_type -> moby.buildkit.v1.PruneRequest + 5, // 57: moby.buildkit.v1.Control.Solve:input_type -> moby.buildkit.v1.SolveRequest + 9, // 58: moby.buildkit.v1.Control.Status:input_type -> moby.buildkit.v1.StatusRequest + 15, // 59: moby.buildkit.v1.Control.Session:input_type -> moby.buildkit.v1.BytesMessage + 16, // 60: moby.buildkit.v1.Control.ListWorkers:input_type -> moby.buildkit.v1.ListWorkersRequest + 18, // 61: moby.buildkit.v1.Control.Info:input_type -> moby.buildkit.v1.InfoRequest + 20, // 62: moby.buildkit.v1.Control.ListenBuildHistory:input_type -> moby.buildkit.v1.BuildHistoryRequest + 23, // 63: moby.buildkit.v1.Control.UpdateBuildHistory:input_type -> moby.buildkit.v1.UpdateBuildHistoryRequest + 3, // 64: moby.buildkit.v1.Control.DiskUsage:output_type -> moby.buildkit.v1.DiskUsageResponse + 4, // 65: moby.buildkit.v1.Control.Prune:output_type -> moby.buildkit.v1.UsageRecord + 8, // 66: moby.buildkit.v1.Control.Solve:output_type -> moby.buildkit.v1.SolveResponse + 10, // 67: moby.buildkit.v1.Control.Status:output_type -> moby.buildkit.v1.StatusResponse + 15, // 68: moby.buildkit.v1.Control.Session:output_type -> moby.buildkit.v1.BytesMessage + 17, // 69: moby.buildkit.v1.Control.ListWorkers:output_type -> moby.buildkit.v1.ListWorkersResponse + 19, // 70: moby.buildkit.v1.Control.Info:output_type -> moby.buildkit.v1.InfoResponse + 21, // 71: moby.buildkit.v1.Control.ListenBuildHistory:output_type -> moby.buildkit.v1.BuildHistoryEvent + 24, // 72: moby.buildkit.v1.Control.UpdateBuildHistory:output_type -> moby.buildkit.v1.UpdateBuildHistoryResponse + 64, // [64:73] is the sub-list for method output_type + 55, // [55:64] is the sub-list for method input_type + 55, // [55:55] is the sub-list for extension type_name + 55, // 
[55:55] is the sub-list for extension extendee + 0, // [0:55] is the sub-list for field type_name } func init() { file_github_com_moby_buildkit_api_services_control_control_proto_init() } @@ -2396,7 +2566,7 @@ func file_github_com_moby_buildkit_api_services_control_control_proto_init() { GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_github_com_moby_buildkit_api_services_control_control_proto_rawDesc), len(file_github_com_moby_buildkit_api_services_control_control_proto_rawDesc)), NumEnums: 1, - NumMessages: 39, + NumMessages: 42, NumExtensions: 0, NumServices: 1, }, diff --git a/api/services/control/control.proto b/api/services/control/control.proto index 128408e8426a..c1d0d1e6eb83 100644 --- a/api/services/control/control.proto +++ b/api/services/control/control.proto @@ -35,7 +35,7 @@ message PruneRequest { } message DiskUsageRequest { - repeated string filter = 1; + repeated string filter = 1; } @@ -105,10 +105,19 @@ message CacheOptionsEntry { // Attrs are like mode=(min,max), ref=example.com:5000/foo/bar . // See cache importer/exporter implementations' documentation. map<string, string> Attrs = 2; + // ID identifies this exporter. + // ID should be treated by the exporter as opaque. + string ID = 3; } message SolveResponse { - map<string, string> ExporterResponse = 1; + // ExporterResponseDeprecated is a combined exporter response - it aggregates + // responses from all exporters (including cache exporters) running in parallel. + // It is deprecated in favor of the structured exporter responses but will be + // populated as long as it is supported. + map<string, string> ExporterResponseDeprecated = 1; + repeated ExporterResponse exporterResponses = 2; + repeated ExporterResponse cacheExporterResponses = 3; } message StatusRequest { @@ -250,4 +259,23 @@ message Exporter { string Type = 1; // Attrs specifies exporter configuration map<string, string> Attrs = 2; + // ID identifies this exporter. + // ID should be treated by the exporter as opaque. 
+ string ID = 3; +} + +// ExporterResponse describes the output of an exporter +message ExporterResponse { + // Metadata describes the exporter + ExporterMetadata metadata = 1; + // Data is the exporter's output + map data = 2; +} + +// ExporterMetadata describes the output exporter +message ExporterMetadata { + // ID identifies the exporter + string ID = 1; + // Type identifies the exporter type + string type = 2; } diff --git a/api/services/control/control_vtproto.pb.go b/api/services/control/control_vtproto.pb.go index 4faf1b112c5d..7152d697ed2c 100644 --- a/api/services/control/control_vtproto.pb.go +++ b/api/services/control/control_vtproto.pb.go @@ -236,6 +236,7 @@ func (m *CacheOptionsEntry) CloneVT() *CacheOptionsEntry { } r := new(CacheOptionsEntry) r.Type = m.Type + r.ID = m.ID if rhs := m.Attrs; rhs != nil { tmpContainer := make(map[string]string, len(rhs)) for k, v := range rhs { @@ -259,12 +260,26 @@ func (m *SolveResponse) CloneVT() *SolveResponse { return (*SolveResponse)(nil) } r := new(SolveResponse) - if rhs := m.ExporterResponse; rhs != nil { + if rhs := m.ExporterResponseDeprecated; rhs != nil { tmpContainer := make(map[string]string, len(rhs)) for k, v := range rhs { tmpContainer[k] = v } - r.ExporterResponse = tmpContainer + r.ExporterResponseDeprecated = tmpContainer + } + if rhs := m.ExporterResponses; rhs != nil { + tmpContainer := make([]*ExporterResponse, len(rhs)) + for k, v := range rhs { + tmpContainer[k] = v.CloneVT() + } + r.ExporterResponses = tmpContainer + } + if rhs := m.CacheExporterResponses; rhs != nil { + tmpContainer := make([]*ExporterResponse, len(rhs)) + for k, v := range rhs { + tmpContainer[k] = v.CloneVT() + } + r.CacheExporterResponses = tmpContainer } if len(m.unknownFields) > 0 { r.unknownFields = make([]byte, len(m.unknownFields)) @@ -760,6 +775,7 @@ func (m *Exporter) CloneVT() *Exporter { } r := new(Exporter) r.Type = m.Type + r.ID = m.ID if rhs := m.Attrs; rhs != nil { tmpContainer := make(map[string]string, 
len(rhs)) for k, v := range rhs { @@ -778,6 +794,48 @@ func (m *Exporter) CloneMessageVT() proto.Message { return m.CloneVT() } +func (m *ExporterResponse) CloneVT() *ExporterResponse { + if m == nil { + return (*ExporterResponse)(nil) + } + r := new(ExporterResponse) + r.Metadata = m.Metadata.CloneVT() + if rhs := m.Data; rhs != nil { + tmpContainer := make(map[string]string, len(rhs)) + for k, v := range rhs { + tmpContainer[k] = v + } + r.Data = tmpContainer + } + if len(m.unknownFields) > 0 { + r.unknownFields = make([]byte, len(m.unknownFields)) + copy(r.unknownFields, m.unknownFields) + } + return r +} + +func (m *ExporterResponse) CloneMessageVT() proto.Message { + return m.CloneVT() +} + +func (m *ExporterMetadata) CloneVT() *ExporterMetadata { + if m == nil { + return (*ExporterMetadata)(nil) + } + r := new(ExporterMetadata) + r.ID = m.ID + r.Type = m.Type + if len(m.unknownFields) > 0 { + r.unknownFields = make([]byte, len(m.unknownFields)) + copy(r.unknownFields, m.unknownFields) + } + return r +} + +func (m *ExporterMetadata) CloneMessageVT() proto.Message { + return m.CloneVT() +} + func (this *PruneRequest) EqualVT(that *PruneRequest) bool { if this == that { return true @@ -1148,6 +1206,9 @@ func (this *CacheOptionsEntry) EqualVT(that *CacheOptionsEntry) bool { return false } } + if this.ID != that.ID { + return false + } return string(this.unknownFields) == string(that.unknownFields) } @@ -1164,11 +1225,11 @@ func (this *SolveResponse) EqualVT(that *SolveResponse) bool { } else if this == nil || that == nil { return false } - if len(this.ExporterResponse) != len(that.ExporterResponse) { + if len(this.ExporterResponseDeprecated) != len(that.ExporterResponseDeprecated) { return false } - for i, vx := range this.ExporterResponse { - vy, ok := that.ExporterResponse[i] + for i, vx := range this.ExporterResponseDeprecated { + vy, ok := that.ExporterResponseDeprecated[i] if !ok { return false } @@ -1176,6 +1237,40 @@ func (this *SolveResponse) EqualVT(that 
*SolveResponse) bool { return false } } + if len(this.ExporterResponses) != len(that.ExporterResponses) { + return false + } + for i, vx := range this.ExporterResponses { + vy := that.ExporterResponses[i] + if p, q := vx, vy; p != q { + if p == nil { + p = &ExporterResponse{} + } + if q == nil { + q = &ExporterResponse{} + } + if !p.EqualVT(q) { + return false + } + } + } + if len(this.CacheExporterResponses) != len(that.CacheExporterResponses) { + return false + } + for i, vx := range this.CacheExporterResponses { + vy := that.CacheExporterResponses[i] + if p, q := vx, vy; p != q { + if p == nil { + p = &ExporterResponse{} + } + if q == nil { + q = &ExporterResponse{} + } + if !p.EqualVT(q) { + return false + } + } + } return string(this.unknownFields) == string(that.unknownFields) } @@ -1915,6 +2010,9 @@ func (this *Exporter) EqualVT(that *Exporter) bool { return false } } + if this.ID != that.ID { + return false + } return string(this.unknownFields) == string(that.unknownFields) } @@ -1925,6 +2023,59 @@ func (this *Exporter) EqualMessageVT(thatMsg proto.Message) bool { } return this.EqualVT(that) } +func (this *ExporterResponse) EqualVT(that *ExporterResponse) bool { + if this == that { + return true + } else if this == nil || that == nil { + return false + } + if !this.Metadata.EqualVT(that.Metadata) { + return false + } + if len(this.Data) != len(that.Data) { + return false + } + for i, vx := range this.Data { + vy, ok := that.Data[i] + if !ok { + return false + } + if vx != vy { + return false + } + } + return string(this.unknownFields) == string(that.unknownFields) +} + +func (this *ExporterResponse) EqualMessageVT(thatMsg proto.Message) bool { + that, ok := thatMsg.(*ExporterResponse) + if !ok { + return false + } + return this.EqualVT(that) +} +func (this *ExporterMetadata) EqualVT(that *ExporterMetadata) bool { + if this == that { + return true + } else if this == nil || that == nil { + return false + } + if this.ID != that.ID { + return false + } + if 
this.Type != that.Type { + return false + } + return string(this.unknownFields) == string(that.unknownFields) +} + +func (this *ExporterMetadata) EqualMessageVT(thatMsg proto.Message) bool { + that, ok := thatMsg.(*ExporterMetadata) + if !ok { + return false + } + return this.EqualVT(that) +} func (m *PruneRequest) MarshalVT() (dAtA []byte, err error) { if m == nil { return nil, nil @@ -2540,6 +2691,13 @@ func (m *CacheOptionsEntry) MarshalToSizedBufferVT(dAtA []byte) (int, error) { i -= len(m.unknownFields) copy(dAtA[i:], m.unknownFields) } + if len(m.ID) > 0 { + i -= len(m.ID) + copy(dAtA[i:], m.ID) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.ID))) + i-- + dAtA[i] = 0x1a + } if len(m.Attrs) > 0 { for k := range m.Attrs { v := m.Attrs[k] @@ -2599,9 +2757,33 @@ func (m *SolveResponse) MarshalToSizedBufferVT(dAtA []byte) (int, error) { i -= len(m.unknownFields) copy(dAtA[i:], m.unknownFields) } - if len(m.ExporterResponse) > 0 { - for k := range m.ExporterResponse { - v := m.ExporterResponse[k] + if len(m.CacheExporterResponses) > 0 { + for iNdEx := len(m.CacheExporterResponses) - 1; iNdEx >= 0; iNdEx-- { + size, err := m.CacheExporterResponses[iNdEx].MarshalToSizedBufferVT(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = protohelpers.EncodeVarint(dAtA, i, uint64(size)) + i-- + dAtA[i] = 0x1a + } + } + if len(m.ExporterResponses) > 0 { + for iNdEx := len(m.ExporterResponses) - 1; iNdEx >= 0; iNdEx-- { + size, err := m.ExporterResponses[iNdEx].MarshalToSizedBufferVT(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = protohelpers.EncodeVarint(dAtA, i, uint64(size)) + i-- + dAtA[i] = 0x12 + } + } + if len(m.ExporterResponseDeprecated) > 0 { + for k := range m.ExporterResponseDeprecated { + v := m.ExporterResponseDeprecated[k] baseI := i i -= len(v) copy(dAtA[i:], v) @@ -3939,6 +4121,13 @@ func (m *Exporter) MarshalToSizedBufferVT(dAtA []byte) (int, error) { i -= len(m.unknownFields) copy(dAtA[i:], m.unknownFields) } + if 
len(m.ID) > 0 { + i -= len(m.ID) + copy(dAtA[i:], m.ID) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.ID))) + i-- + dAtA[i] = 0x1a + } if len(m.Attrs) > 0 { for k := range m.Attrs { v := m.Attrs[k] @@ -3968,6 +4157,115 @@ func (m *Exporter) MarshalToSizedBufferVT(dAtA []byte) (int, error) { return len(dAtA) - i, nil } +func (m *ExporterResponse) MarshalVT() (dAtA []byte, err error) { + if m == nil { + return nil, nil + } + size := m.SizeVT() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBufferVT(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ExporterResponse) MarshalToVT(dAtA []byte) (int, error) { + size := m.SizeVT() + return m.MarshalToSizedBufferVT(dAtA[:size]) +} + +func (m *ExporterResponse) MarshalToSizedBufferVT(dAtA []byte) (int, error) { + if m == nil { + return 0, nil + } + i := len(dAtA) + _ = i + var l int + _ = l + if m.unknownFields != nil { + i -= len(m.unknownFields) + copy(dAtA[i:], m.unknownFields) + } + if len(m.Data) > 0 { + for k := range m.Data { + v := m.Data[k] + baseI := i + i -= len(v) + copy(dAtA[i:], v) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(v))) + i-- + dAtA[i] = 0x12 + i -= len(k) + copy(dAtA[i:], k) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(k))) + i-- + dAtA[i] = 0xa + i = protohelpers.EncodeVarint(dAtA, i, uint64(baseI-i)) + i-- + dAtA[i] = 0x12 + } + } + if m.Metadata != nil { + size, err := m.Metadata.MarshalToSizedBufferVT(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = protohelpers.EncodeVarint(dAtA, i, uint64(size)) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *ExporterMetadata) MarshalVT() (dAtA []byte, err error) { + if m == nil { + return nil, nil + } + size := m.SizeVT() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBufferVT(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *ExporterMetadata) MarshalToVT(dAtA []byte) (int, error) { + size := 
m.SizeVT() + return m.MarshalToSizedBufferVT(dAtA[:size]) +} + +func (m *ExporterMetadata) MarshalToSizedBufferVT(dAtA []byte) (int, error) { + if m == nil { + return 0, nil + } + i := len(dAtA) + _ = i + var l int + _ = l + if m.unknownFields != nil { + i -= len(m.unknownFields) + copy(dAtA[i:], m.unknownFields) + } + if len(m.Type) > 0 { + i -= len(m.Type) + copy(dAtA[i:], m.Type) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.Type))) + i-- + dAtA[i] = 0x12 + } + if len(m.ID) > 0 { + i -= len(m.ID) + copy(dAtA[i:], m.ID) + i = protohelpers.EncodeVarint(dAtA, i, uint64(len(m.ID))) + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func (m *PruneRequest) SizeVT() (n int) { if m == nil { return 0 @@ -4236,8 +4534,12 @@ func (m *CacheOptionsEntry) SizeVT() (n int) { n += mapEntrySize + 1 + protohelpers.SizeOfVarint(uint64(mapEntrySize)) } } - n += len(m.unknownFields) - return n + l = len(m.ID) + if l > 0 { + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + n += len(m.unknownFields) + return n } func (m *SolveResponse) SizeVT() (n int) { @@ -4246,14 +4548,26 @@ func (m *SolveResponse) SizeVT() (n int) { } var l int _ = l - if len(m.ExporterResponse) > 0 { - for k, v := range m.ExporterResponse { + if len(m.ExporterResponseDeprecated) > 0 { + for k, v := range m.ExporterResponseDeprecated { _ = k _ = v mapEntrySize := 1 + len(k) + protohelpers.SizeOfVarint(uint64(len(k))) + 1 + len(v) + protohelpers.SizeOfVarint(uint64(len(v))) n += mapEntrySize + 1 + protohelpers.SizeOfVarint(uint64(mapEntrySize)) } } + if len(m.ExporterResponses) > 0 { + for _, e := range m.ExporterResponses { + l = e.SizeVT() + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + } + if len(m.CacheExporterResponses) > 0 { + for _, e := range m.CacheExporterResponses { + l = e.SizeVT() + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + } n += len(m.unknownFields) return n } @@ -4789,6 +5103,50 @@ func (m *Exporter) SizeVT() (n int) { n += mapEntrySize + 1 + 
protohelpers.SizeOfVarint(uint64(mapEntrySize)) } } + l = len(m.ID) + if l > 0 { + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + n += len(m.unknownFields) + return n +} + +func (m *ExporterResponse) SizeVT() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Metadata != nil { + l = m.Metadata.SizeVT() + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + if len(m.Data) > 0 { + for k, v := range m.Data { + _ = k + _ = v + mapEntrySize := 1 + len(k) + protohelpers.SizeOfVarint(uint64(len(k))) + 1 + len(v) + protohelpers.SizeOfVarint(uint64(len(v))) + n += mapEntrySize + 1 + protohelpers.SizeOfVarint(uint64(mapEntrySize)) + } + } + n += len(m.unknownFields) + return n +} + +func (m *ExporterMetadata) SizeVT() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + l = len(m.ID) + if l > 0 { + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } + l = len(m.Type) + if l > 0 { + n += 1 + l + protohelpers.SizeOfVarint(uint64(l)) + } n += len(m.unknownFields) return n } @@ -6846,6 +7204,38 @@ func (m *CacheOptionsEntry) UnmarshalVT(dAtA []byte) error { } m.Attrs[mapkey] = mapvalue iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ID", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ID = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := protohelpers.Skip(dAtA[iNdEx:]) @@ -6899,7 +7289,7 @@ func (m *SolveResponse) UnmarshalVT(dAtA []byte) error { 
switch fieldNum { case 1: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ExporterResponse", wireType) + return fmt.Errorf("proto: wrong wireType = %d for field ExporterResponseDeprecated", wireType) } var msglen int for shift := uint(0); ; shift += 7 { @@ -6926,8 +7316,8 @@ func (m *SolveResponse) UnmarshalVT(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - if m.ExporterResponse == nil { - m.ExporterResponse = make(map[string]string) + if m.ExporterResponseDeprecated == nil { + m.ExporterResponseDeprecated = make(map[string]string) } var mapkey string var mapvalue string @@ -7022,7 +7412,75 @@ func (m *SolveResponse) UnmarshalVT(dAtA []byte) error { iNdEx += skippy } } - m.ExporterResponse[mapkey] = mapvalue + m.ExporterResponseDeprecated[mapkey] = mapvalue + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ExporterResponses", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ExporterResponses = append(m.ExporterResponses, &ExporterResponse{}) + if err := m.ExporterResponses[len(m.ExporterResponses)-1].UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field CacheExporterResponses", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) 
<< shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.CacheExporterResponses = append(m.CacheExporterResponses, &ExporterResponse{}) + if err := m.CacheExporterResponses[len(m.CacheExporterResponses)-1].UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex default: iNdEx = preIndex @@ -10815,6 +11273,367 @@ func (m *Exporter) UnmarshalVT(dAtA []byte) error { } m.Attrs[mapkey] = mapvalue iNdEx = postIndex + case 3: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ID", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ID = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return protohelpers.ErrInvalidLength + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ExporterResponse) UnmarshalVT(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ExporterResponse: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ExporterResponse: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Metadata", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Metadata == nil { + m.Metadata = &ExporterMetadata{} + } + if err := m.Metadata.UnmarshalVT(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Data", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := 
iNdEx + msglen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Data == nil { + m.Data = make(map[string]string) + } + var mapkey string + var mapvalue string + for iNdEx < postIndex { + entryPreIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + if fieldNum == 1 { + var stringLenmapkey uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapkey |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapkey := int(stringLenmapkey) + if intStringLenmapkey < 0 { + return protohelpers.ErrInvalidLength + } + postStringIndexmapkey := iNdEx + intStringLenmapkey + if postStringIndexmapkey < 0 { + return protohelpers.ErrInvalidLength + } + if postStringIndexmapkey > l { + return io.ErrUnexpectedEOF + } + mapkey = string(dAtA[iNdEx:postStringIndexmapkey]) + iNdEx = postStringIndexmapkey + } else if fieldNum == 2 { + var stringLenmapvalue uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLenmapvalue |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLenmapvalue := int(stringLenmapvalue) + if intStringLenmapvalue < 0 { + return protohelpers.ErrInvalidLength + } + postStringIndexmapvalue := iNdEx + intStringLenmapvalue + if postStringIndexmapvalue < 0 { + return protohelpers.ErrInvalidLength + } + if postStringIndexmapvalue > l { + return io.ErrUnexpectedEOF + } + mapvalue = string(dAtA[iNdEx:postStringIndexmapvalue]) + iNdEx = 
postStringIndexmapvalue + } else { + iNdEx = entryPreIndex + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return protohelpers.ErrInvalidLength + } + if (iNdEx + skippy) > postIndex { + return io.ErrUnexpectedEOF + } + iNdEx += skippy + } + } + m.Data[mapkey] = mapvalue + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := protohelpers.Skip(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return protohelpers.ErrInvalidLength + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.unknownFields = append(m.unknownFields, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *ExporterMetadata) UnmarshalVT(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: ExporterMetadata: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: ExporterMetadata: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ID", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + 
intStringLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ID = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Type", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return protohelpers.ErrIntOverflow + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return protohelpers.ErrInvalidLength + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return protohelpers.ErrInvalidLength + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.Type = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := protohelpers.Skip(dAtA[iNdEx:]) diff --git a/client/build.go b/client/build.go index 93787ba4ff55..a993aa4e1ff3 100644 --- a/client/build.go +++ b/client/build.go @@ -42,7 +42,7 @@ func (c *Client) Build(ctx context.Context, opt SolveOpt, product string, buildF }) } - cb := func(ref string, s *session.Session, opts map[string]string) error { + opt.s.runGateway = func(ref string, s *session.Session, opts map[string]string) error { if feOpts == nil { feOpts = map[string]string{} } @@ -62,7 +62,7 @@ func (c *Client) Build(ctx context.Context, opt SolveOpt, product string, buildF return nil } - return c.solve(ctx, nil, cb, opt, statusChan) + return c.solve(ctx, opt, statusChan) } func (c *Client) gatewayClientForBuild(buildid string) *gatewayClientForBuild { diff --git a/client/client_test.go b/client/client_test.go index 01a6ab3b8005..541492bafccc 100644 --- a/client/client_test.go +++ b/client/client_test.go @@ -174,6 +174,7 @@ var allTests = []func(t *testing.T, sb integration.Sandbox){ testTarExporterSymlink, testMultipleRegistryCacheImportExport, 
testMultipleExporters, + testMultipleImageExporters, testSourceMap, testSourceMapFromRef, testLazyImagePush, @@ -2706,7 +2707,7 @@ func testSessionExporter(t *testing.T, sb integration.Sandbox) { require.Equal(t, "foo", resp.Entries[1].Path) exporterCalled = true - target.Add(filesync.WithFSSync(0, fixedWriteCloser(nopWriteCloser{outW}))) + target.Add(filesync.WithFSSync("0", fixedWriteCloser(nopWriteCloser{outW}))) return []*exporter.ExporterRequest{ { Type: ExporterOCI, @@ -3848,6 +3849,87 @@ func testMultipleExporters(t *testing.T, sb integration.Sandbox) { } } +func testMultipleImageExporters(t *testing.T, sb integration.Sandbox) { + workers.CheckFeatureCompat(t, sb, workers.FeatureOCIExporter) + requiresLinux(t) + + c, err := New(sb.Context(), sb.Address()) + require.NoError(t, err) + defer c.Close() + + def, err := llb.Scratch().Marshal(context.TODO()) + require.NoError(t, err) + + destDir := t.TempDir() + ociTar := filepath.Join(destDir, "oci.tar") + ociOut, err := os.Create(ociTar) + require.NoError(t, err) + defer ociOut.Close() + + dockerTar := filepath.Join(destDir, "docker.tar") + dockerOut, err := os.Create(dockerTar) + require.NoError(t, err) + defer dockerOut.Close() + + exporters := []ExportEntry{ + { + Type: ExporterOCI, + Attrs: map[string]string{ + "dest": ociTar, + }, + Output: fixedWriteCloser(ociOut), + }, + { + Type: ExporterDocker, + Attrs: map[string]string{ + "dest": dockerTar, + }, + Output: fixedWriteCloser(dockerOut), + }, + } + + ref := identity.NewID() + _, err = c.Solve(sb.Context(), def, SolveOpt{ + Ref: ref, + Exports: exporters, + }, nil) + require.NoError(t, err) + + require.FileExists(t, filepath.Join(destDir, "oci.tar")) + require.FileExists(t, filepath.Join(destDir, "docker.tar")) + + ociConfig := extractImageConfig(t, ociTar) + dockerConfig := extractImageConfig(t, dockerTar) + // Validate that the image configurtion is not empty + require.NotEmpty(t, ociConfig.Architecture, "architecture is missing") + require.NotEmpty(t, 
dockerConfig.Architecture, "architecture is missing") +} + +func extractImageConfig(t *testing.T, path string) ocispecs.Image { + t.Helper() + + dt, err := os.ReadFile(path) + require.NoError(t, err) + + m, err := testutil.ReadTarToMap(dt, false) + require.NoError(t, err) + + var index ocispecs.Index + err = json.Unmarshal(m["index.json"].Data, &index) + require.NoError(t, err) + require.NotEmpty(t, index.Manifests, "index is missing platform manifests") + + var manifest ocispecs.Manifest + err = json.Unmarshal(m[filepath.Join("blobs", "sha256", index.Manifests[0].Digest.Encoded())].Data, &manifest) + require.NoError(t, err) + + var config ocispecs.Image + err = json.Unmarshal(m[filepath.Join("blobs", "sha256", manifest.Config.Digest.Encoded())].Data, &config) + require.NoError(t, err) + + return config +} + func testOCIExporter(t *testing.T, sb integration.Sandbox) { workers.CheckFeatureCompat(t, sb, workers.FeatureOCIExporter) requiresLinux(t) diff --git a/client/graph.go b/client/graph.go index 1000cd78f620..6fd67f09a203 100644 --- a/client/graph.go +++ b/client/graph.go @@ -56,5 +56,32 @@ type SolveStatus struct { type SolveResponse struct { // ExporterResponse is also used for CacheExporter - ExporterResponse map[string]string + ExporterResponse map[string]string + ExporterResponses []ExporterResponse + CacheExporterResponses []ExporterResponse +} + +func (r *SolveResponse) exporter(id string) *ExporterResponse { + return exporterResponses(r.ExporterResponses).withID(id) +} + +func (r *SolveResponse) cacheExporter(id string) *ExporterResponse { + return exporterResponses(r.CacheExporterResponses).withID(id) +} + +func (r exporterResponses) withID(id string) *ExporterResponse { + for _, exp := range r { + if exp.ID == id { + return &exp + } + } + return nil +} + +type exporterResponses []ExporterResponse + +type ExporterResponse struct { + ID string + Type string + Data map[string]string } diff --git a/client/solve.go b/client/solve.go index 
3ac415527dde..5e8a879f2c24 100644 --- a/client/solve.go +++ b/client/solve.go @@ -8,6 +8,7 @@ import ( "maps" "os" "slices" + "strconv" "strings" "time" @@ -47,12 +48,44 @@ type SolveOpt struct { CacheImports []CacheOptionsEntry Session []session.Attachable AllowedEntitlements []string + // When the session is custom-initialized, Init can be used to + // set up the session for export automatically. SharedSession *session.Session // TODO: refactor to better session syncing SessionPreInitialized bool // TODO: refactor to better session syncing Internal bool SourcePolicy *spb.Policy SourcePolicyProvider session.Attachable Ref string + + // internal solver state + s solverState +} + +type solverState struct { + exporterOpt *exporterOptions + cacheOpt *cacheOptions + // Only one of runGateway or def can be set. + // runGateway optionally defines the gateway callback + runGateway runGatewayCB + // def optionally defines the LLB definition for the client + def *llb.Definition +} + +type exporterOptions struct { + // storesToUpdate maps exporter ID -> oci store + storesToUpdate map[string]ociStore +} + +type cacheOptions struct { + options controlapi.CacheOptions + contentStores map[string]content.Store // key: ID of content store ("local:" + csDir) + storesToUpdate map[string]ociStore // key: exporter ID + frontendAttrs map[string]string +} + +type ociStore struct { + path string + tag string } type ExportEntry struct { @@ -61,11 +94,19 @@ type ExportEntry struct { Output filesync.FileOutputFunc // for ExporterOCI and ExporterDocker OutputDir string // for ExporterLocal OutputStore content.Store + + // id identifies the exporter in the configuration. + // Will be assigned automatically and should not be set by the user. + id string } type CacheOptionsEntry struct { Type string Attrs map[string]string + + // id identifies the exporter in the configuration. + // Will be assigned automatically and should not be set by the user. 
+ id string } // Solve calls Solve on the controller. @@ -83,24 +124,141 @@ func (c *Client) Solve(ctx context.Context, def *llb.Definition, opt SolveOpt, s if opt.Frontend != "" && def != nil { return nil, errors.Errorf("invalid definition for frontend %s", opt.Frontend) } + opt.s.def = def - return c.solve(ctx, def, nil, opt, statusChan) + return c.solve(ctx, opt, statusChan) } type runGatewayCB func(ref string, s *session.Session, opts map[string]string) error -func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runGatewayCB, opt SolveOpt, statusChan chan *SolveStatus) (*SolveResponse, error) { - if def != nil && runGateway != nil { - return nil, errors.New("invalid with def and cb") +// Init initializes the SolveOpt. +// It parses and initializes the cache exports/imports and output exporters. +func (opt *SolveOpt) Init(ctx context.Context, s *session.Session) error { + opt.initExporterIDs() + if err := opt.parseCacheOptions(ctx); err != nil { + return err } + return opt.parseExporterOptions(s) +} - mounts, err := prepareMounts(&opt) +func (opt *SolveOpt) initExporterIDs() { + for i := range opt.Exports { + opt.Exports[i].id = strconv.Itoa(i) + } + for i := range opt.CacheExports { + opt.CacheExports[i].id = strconv.Itoa(i) + } +} + +// parseExporterOptions configures the specified session with the underlying exporter configuration. 
+// It needs to be invoked *after* ParseCacheOpts +func (opt *SolveOpt) parseExporterOptions(s *session.Session) error { + if opt.s.exporterOpt != nil { + return nil + } + + mounts, err := prepareMounts(opt) if err != nil { - return nil, err + return err } - syncedDirs, err := prepareSyncedFiles(def, mounts) + syncedDirs, err := prepareSyncedFiles(opt.s.def, mounts) if err != nil { - return nil, err + return err + } + + if len(syncedDirs) > 0 { + s.Allow(filesync.NewFSSyncProvider(syncedDirs)) + } + + for _, a := range opt.Session { + s.Allow(a) + } + + contentStores := map[string]content.Store{} + maps.Copy(contentStores, opt.s.cacheOpt.contentStores) + for key, store := range opt.OCIStores { + key2 := "oci:" + key + if _, ok := contentStores[key2]; ok { + return errors.Errorf("oci store key %q already exists", key) + } + contentStores[key2] = store + } + + opt.s.exporterOpt = &exporterOptions{} + var syncTargets []filesync.FSSyncTarget + for _, ex := range opt.Exports { + var supportFile, supportDir, supportStore bool + switch ex.Type { + case ExporterLocal: + supportDir = true + case ExporterTar: + supportFile = true + case ExporterOCI, ExporterDocker: + supportFile = ex.Output != nil + supportStore = ex.OutputStore != nil || ex.OutputDir != "" + if supportFile && supportStore { + return errors.Errorf("both file and store output is not supported by %s exporter", ex.Type) + } + } + if !supportFile && ex.Output != nil { + return errors.Errorf("output file writer is not supported by %s exporter", ex.Type) + } + if !supportDir && !supportStore && ex.OutputDir != "" { + return errors.Errorf("output directory is not supported by %s exporter", ex.Type) + } + if !supportStore && ex.OutputStore != nil { + return errors.Errorf("output store is not supported by %s exporter", ex.Type) + } + if supportFile { + if ex.Output == nil { + return errors.Errorf("output file writer is required for %s exporter", ex.Type) + } + syncTargets = append(syncTargets, 
filesync.WithFSSync(ex.id, ex.Output)) + } + if supportDir { + if ex.OutputDir == "" { + return errors.Errorf("output directory is required for %s exporter", ex.Type) + } + syncTargets = append(syncTargets, filesync.WithFSSyncDir(ex.id, ex.OutputDir)) + } + if supportStore { + store := ex.OutputStore + if store == nil { + if err := os.MkdirAll(ex.OutputDir, 0755); err != nil { + return err + } + store, err = contentlocal.NewStore(ex.OutputDir) + if err != nil { + return err + } + if opt.s.exporterOpt.storesToUpdate == nil { + opt.s.exporterOpt.storesToUpdate = make(map[string]ociStore) + } + opt.s.exporterOpt.storesToUpdate[ex.id] = ociStore{path: ex.OutputDir} + } + + // TODO: this should be dependent on the exporter id (to allow multiple oci exporters) + storeName := "export" + if _, ok := contentStores[storeName]; ok { + return errors.Errorf("oci store key %q already exists", storeName) + } + contentStores[storeName] = store + } + } + + if len(contentStores) > 0 { + s.Allow(sessioncontent.NewAttachable(contentStores)) + } + + if len(syncTargets) > 0 { + s.Allow(filesync.NewFSSyncTarget(syncTargets...)) + } + return nil +} + +func (c *Client) solve(ctx context.Context, opt SolveOpt, statusChan chan *SolveStatus) (*SolveResponse, error) { + if opt.s.def != nil && opt.s.runGateway != nil { + return nil, errors.New("invalid with def and cb") } ref := identity.NewID() @@ -117,107 +275,27 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG } s := opt.SharedSession - if s == nil { if opt.SessionPreInitialized { return nil, errors.Errorf("no session provided for preinitialized option") } + var err error s, err = session.NewSession(statusContext, opt.SharedKey) if err != nil { return nil, errors.Wrap(err, "failed to create session") } } - cacheOpt, err := parseCacheOptions(ctx, runGateway != nil, opt) + opt.initExporterIDs() + + err := opt.parseCacheOptions(ctx) if err != nil { return nil, err } - storesToUpdate := []string{} - if 
!opt.SessionPreInitialized { - if len(syncedDirs) > 0 { - s.Allow(filesync.NewFSSyncProvider(syncedDirs)) - } - - for _, a := range opt.Session { - s.Allow(a) - } - - contentStores := map[string]content.Store{} - maps.Copy(contentStores, cacheOpt.contentStores) - for key, store := range opt.OCIStores { - key2 := "oci:" + key - if _, ok := contentStores[key2]; ok { - return nil, errors.Errorf("oci store key %q already exists", key) - } - contentStores[key2] = store - } - - var syncTargets []filesync.FSSyncTarget - for exID, ex := range opt.Exports { - var supportFile, supportDir, supportStore bool - switch ex.Type { - case ExporterLocal: - supportDir = true - case ExporterTar: - supportFile = true - case ExporterOCI, ExporterDocker: - supportFile = ex.Output != nil - supportStore = ex.OutputStore != nil || ex.OutputDir != "" - if supportFile && supportStore { - return nil, errors.Errorf("both file and store output is not supported by %s exporter", ex.Type) - } - } - if !supportFile && ex.Output != nil { - return nil, errors.Errorf("output file writer is not supported by %s exporter", ex.Type) - } - if !supportDir && !supportStore && ex.OutputDir != "" { - return nil, errors.Errorf("output directory is not supported by %s exporter", ex.Type) - } - if !supportStore && ex.OutputStore != nil { - return nil, errors.Errorf("output store is not supported by %s exporter", ex.Type) - } - if supportFile { - if ex.Output == nil { - return nil, errors.Errorf("output file writer is required for %s exporter", ex.Type) - } - syncTargets = append(syncTargets, filesync.WithFSSync(exID, ex.Output)) - } - if supportDir { - if ex.OutputDir == "" { - return nil, errors.Errorf("output directory is required for %s exporter", ex.Type) - } - syncTargets = append(syncTargets, filesync.WithFSSyncDir(exID, ex.OutputDir)) - } - if supportStore { - store := ex.OutputStore - if store == nil { - if err := os.MkdirAll(ex.OutputDir, 0755); err != nil { - return nil, err - } - store, err = 
contentlocal.NewStore(ex.OutputDir) - if err != nil { - return nil, err - } - storesToUpdate = append(storesToUpdate, ex.OutputDir) - } - - // TODO: this should be dependent on the exporter id (to allow multiple oci exporters) - storeName := "export" - if _, ok := contentStores[storeName]; ok { - return nil, errors.Errorf("oci store key %q already exists", storeName) - } - contentStores[storeName] = store - } - } - - if len(contentStores) > 0 { - s.Allow(sessioncontent.NewAttachable(contentStores)) - } - - if len(syncTargets) > 0 { - s.Allow(filesync.NewFSSyncTarget(syncTargets...)) + if err := opt.parseExporterOptions(s); err != nil { + return nil, err } if opt.SourcePolicyProvider != nil { @@ -234,7 +312,7 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG } frontendAttrs := maps.Clone(opt.FrontendAttrs) - maps.Copy(frontendAttrs, cacheOpt.frontendAttrs) + maps.Copy(frontendAttrs, opt.s.cacheOpt.frontendAttrs) const statusInactivityTimeout = 5 * time.Second statusActivity := make(chan struct{}, 1) @@ -269,8 +347,8 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG } }() var pbd *pb.Definition - if def != nil { - pbd = def.ToPB() + if opt.s.def != nil { + pbd = opt.s.def.ToPB() } frontendInputs := make(map[string]*pb.Definition) @@ -293,6 +371,7 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG exports = append(exports, &controlapi.Exporter{ Type: exp.Type, Attrs: exp.Attrs, + ID: exp.id, }) } @@ -307,7 +386,7 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG Frontend: opt.Frontend, FrontendAttrs: frontendAttrs, FrontendInputs: frontendInputs, - Cache: &cacheOpt.options, + Cache: &opt.s.cacheOpt.options, Entitlements: slices.Clone(opt.AllowedEntitlements), Internal: opt.Internal, SourcePolicy: opt.SourcePolicy, @@ -321,14 +400,30 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG return 
errors.Wrap(err, "failed to solve") } res = &SolveResponse{ - ExporterResponse: resp.ExporterResponse, + ExporterResponse: resp.ExporterResponseDeprecated, + ExporterResponses: make([]ExporterResponse, 0, len(resp.ExporterResponses)), + CacheExporterResponses: make([]ExporterResponse, 0, len(resp.CacheExporterResponses)), + } + for _, resp := range resp.ExporterResponses { + res.ExporterResponses = append(res.ExporterResponses, ExporterResponse{ + ID: resp.Metadata.ID, + Type: resp.Metadata.Type, + Data: resp.Data, + }) + } + for _, resp := range resp.CacheExporterResponses { + res.CacheExporterResponses = append(res.CacheExporterResponses, ExporterResponse{ + ID: resp.Metadata.ID, + Type: resp.Metadata.Type, + Data: resp.Data, + }) } return nil }) - if runGateway != nil { + if opt.s.runGateway != nil { eg.Go(func() error { - err := runGateway(ref, s, frontendAttrs) + err := opt.s.runGateway(ref, s, frontendAttrs) if err == nil { return nil } @@ -381,47 +476,93 @@ func (c *Client) solve(ctx context.Context, def *llb.Definition, runGateway runG if err := eg.Wait(); err != nil { return nil, err } - // Update index.json of exported cache content store - // FIXME(AkihiroSuda): dedupe const definition of cache/remotecache.ExporterResponseManifestDesc = "cache.manifest" - if manifestDescJSON := res.ExporterResponse["cache.manifest"]; manifestDescJSON != "" { - var manifestDesc ocispecs.Descriptor - if err = json.Unmarshal([]byte(manifestDescJSON), &manifestDesc); err != nil { + + for id, store := range opt.s.cacheOpt.storesToUpdate { + // Update index.json of exported cache content store + manifestDesc, err := getCacheManifestDescriptor(id, res) + if err != nil { return nil, err } - for storePath, tag := range cacheOpt.storesToUpdate { - idx := ociindex.NewStoreIndex(storePath) - if err := idx.Put(manifestDesc, ociindex.Tag(tag)); err != nil { - return nil, err - } + if manifestDesc == nil { + continue + } + idx := ociindex.NewStoreIndex(store.path) + if err := 
idx.Put(*manifestDesc, ociindex.Tag(store.tag)); err != nil { + return nil, err } } - if manifestDescDt := res.ExporterResponse[exptypes.ExporterImageDescriptorKey]; manifestDescDt != "" { - manifestDescDt, err := base64.StdEncoding.DecodeString(manifestDescDt) + + if len(opt.s.exporterOpt.storesToUpdate) == 0 { + return res, nil + } + for id, store := range opt.s.exporterOpt.storesToUpdate { + manifestDesc, err := getImageManifestDescriptor(id, res) if err != nil { return nil, err } - var manifestDesc ocispecs.Descriptor - if err = json.Unmarshal(manifestDescDt, &manifestDesc); err != nil { - return nil, err + if manifestDesc == nil { + continue } - for _, storePath := range storesToUpdate { - names := []ociindex.NameOrTag{ociindex.Tag("latest")} - if t, ok := res.ExporterResponse[exptypes.ExporterImageNameKey]; ok { + names := []ociindex.NameOrTag{ociindex.Tag("latest")} + if resp := res.exporter(id); resp != nil { + if t, ok := resp.Data[exptypes.ExporterImageNameKey]; ok { inp := strings.Split(t, ",") names = make([]ociindex.NameOrTag, len(inp)) for i, n := range inp { names[i] = ociindex.Name(n) } } - idx := ociindex.NewStoreIndex(storePath) - if err := idx.Put(manifestDesc, names...); err != nil { - return nil, err - } + } + idx := ociindex.NewStoreIndex(store.path) + if err := idx.Put(*manifestDesc, names...); err != nil { + return nil, err } } return res, nil } +func getCacheManifestDescriptor(exporterID string, resp *SolveResponse) (*ocispecs.Descriptor, error) { + const exporterResponseManifestDesc = "cache.manifest" + if resp := resp.cacheExporter(exporterID); resp != nil { + // FIXME(AkihiroSuda): dedupe const definition of cache/remotecache.ExporterResponseManifestDesc = "cache.manifest" + if manifestDescDt := resp.Data[exporterResponseManifestDesc]; manifestDescDt != "" { + return unmarshalManifestDescriptor(manifestDescDt) + } + } + if manifestDescDt := resp.ExporterResponse[exporterResponseManifestDesc]; manifestDescDt != "" { + return 
unmarshalManifestDescriptor(manifestDescDt) + } + return nil, nil +} + +func getImageManifestDescriptor(exporterID string, resp *SolveResponse) (*ocispecs.Descriptor, error) { + if resp := resp.exporter(exporterID); resp != nil { + if manifestDescDt := resp.Data[exptypes.ExporterImageDescriptorKey]; manifestDescDt != "" { + return unmarshalEncodedManifestDescriptor(manifestDescDt) + } + } + if manifestDescDt := resp.ExporterResponse[exptypes.ExporterImageDescriptorKey]; manifestDescDt != "" { + return unmarshalEncodedManifestDescriptor(manifestDescDt) + } + return nil, nil +} + +func unmarshalEncodedManifestDescriptor(base64Payload string) (*ocispecs.Descriptor, error) { + manifestDescDt, err := base64.StdEncoding.DecodeString(base64Payload) + if err != nil { + return nil, err + } + return unmarshalManifestDescriptor(string(manifestDescDt)) +} + +func unmarshalManifestDescriptor(manifestDescJSON string) (*ocispecs.Descriptor, error) { + var desc ocispecs.Descriptor + if err := json.Unmarshal([]byte(manifestDescJSON), &desc); err != nil { + return nil, err + } + return &desc, nil +} + func prepareSyncedFiles(def *llb.Definition, localMounts map[string]fsutil.FS) (filesync.StaticDirSource, error) { resetUIDAndGID := func(p string, st *fstypes.Stat) fsutil.MapResult { st.Uid = 0 @@ -466,33 +607,29 @@ func prepareSyncedFiles(def *llb.Definition, localMounts map[string]fsutil.FS) ( return result, nil } -type cacheOptions struct { - options controlapi.CacheOptions - contentStores map[string]content.Store // key: ID of content store ("local:" + csDir) - storesToUpdate map[string]string // key: path to content store, value: tag - frontendAttrs map[string]string -} - -func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cacheOptions, error) { +func (opt *SolveOpt) parseCacheOptions(ctx context.Context) error { + if opt.s.cacheOpt != nil { + return nil + } var ( cacheExports []*controlapi.CacheOptionsEntry cacheImports []*controlapi.CacheOptionsEntry ) 
contentStores := make(map[string]content.Store) - storesToUpdate := make(map[string]string) + storesToUpdate := make(map[string]ociStore) frontendAttrs := make(map[string]string) for _, ex := range opt.CacheExports { if ex.Type == "local" { csDir := ex.Attrs["dest"] if csDir == "" { - return nil, errors.New("local cache exporter requires dest") + return errors.New("local cache exporter requires dest") } if err := os.MkdirAll(csDir, 0755); err != nil { - return nil, err + return err } cs, err := contentlocal.NewStore(csDir) if err != nil { - return nil, err + return err } contentStores["local:"+csDir] = cs @@ -500,25 +637,25 @@ func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cach if t, ok := ex.Attrs["tag"]; ok { tag = t } - // TODO(AkihiroSuda): support custom index JSON path and tag - storesToUpdate[csDir] = tag + storesToUpdate[ex.id] = ociStore{path: csDir, tag: tag} } if ex.Type == "registry" { regRef := ex.Attrs["ref"] if regRef == "" { - return nil, errors.New("registry cache exporter requires ref") + return errors.New("registry cache exporter requires ref") } } cacheExports = append(cacheExports, &controlapi.CacheOptionsEntry{ Type: ex.Type, Attrs: ex.Attrs, + ID: ex.id, }) } for _, im := range opt.CacheImports { if im.Type == "local" { csDir := im.Attrs["src"] if csDir == "" { - return nil, errors.New("local cache importer requires src") + return errors.New("local cache importer requires src") } cs, err := contentlocal.NewStore(csDir) if err != nil { @@ -543,14 +680,14 @@ func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cach } } if im.Attrs["digest"] == "" { - return nil, errors.New("local cache importer requires either explicit digest, \"latest\" tag or custom tag on index.json") + return errors.New("local cache importer requires either explicit digest, \"latest\" tag or custom tag on index.json") } contentStores["local:"+csDir] = cs } if im.Type == "registry" { regRef := im.Attrs["ref"] if regRef == 
"" { - return nil, errors.New("registry cache importer requires ref") + return errors.New("registry cache importer requires ref") } } cacheImports = append(cacheImports, &controlapi.CacheOptionsEntry{ @@ -558,16 +695,16 @@ func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cach Attrs: im.Attrs, }) } - if opt.Frontend != "" || isGateway { + if opt.Frontend != "" || opt.s.runGateway != nil { if len(cacheImports) > 0 { s, err := json.Marshal(cacheImports) if err != nil { - return nil, err + return err } frontendAttrs["cache-imports"] = string(s) } } - res := cacheOptions{ + opt.s.cacheOpt = &cacheOptions{ options: controlapi.CacheOptions{ Exports: cacheExports, Imports: cacheImports, @@ -576,7 +713,7 @@ func parseCacheOptions(ctx context.Context, isGateway bool, opt SolveOpt) (*cach storesToUpdate: storesToUpdate, frontendAttrs: frontendAttrs, } - return &res, nil + return nil } func prepareMounts(opt *SolveOpt) (map[string]fsutil.FS, error) { diff --git a/control/control.go b/control/control.go index 64b0ee1ccc49..df3a57fdb9b9 100644 --- a/control/control.go +++ b/control/control.go @@ -22,7 +22,6 @@ import ( "github.com/moby/buildkit/client/llb" "github.com/moby/buildkit/cmd/buildkitd/config" controlgateway "github.com/moby/buildkit/control/gateway" - "github.com/moby/buildkit/exporter" "github.com/moby/buildkit/exporter/containerimage/exptypes" "github.com/moby/buildkit/exporter/util/epoch" "github.com/moby/buildkit/frontend" @@ -411,18 +410,22 @@ func (c *Controller) Solve(ctx context.Context, req *controlapi.SolveRequest) (* } } - var expis []exporter.ExporterInstance - for i, ex := range req.Exporters { + var expis []llbsolver.Exporter + for _, ex := range req.Exporters { exp, err := w.Exporter(ex.Type, c.opt.SessionManager) if err != nil { return nil, err } bklog.G(ctx).Debugf("resolve exporter %s with %v", ex.Type, ex.Attrs) - expi, err := exp.Resolve(ctx, i, ex.Attrs) + expi, err := exp.Resolve(ctx, ex.Attrs) if err != nil { return 
nil, err } - expis = append(expis, expi) + expis = append(expis, llbsolver.Exporter{ + ID: ex.ID, + ExporterAPIs: llbsolver.NewExporterAPIs(ex.ID), + ExporterInstance: expi, + }) } rest, dupes, err := findDuplicateCacheOptions(req.Cache.Exports) @@ -442,7 +445,7 @@ func (c *Controller) Solve(ctx context.Context, req *controlapi.SolveRequest) (* if !ok { return nil, errors.Errorf("unknown cache exporter: %q", e.Type) } - var exp llbsolver.RemoteCacheExporter + exp := llbsolver.RemoteCacheExporter{ID: e.ID} exp.Exporter, err = cacheExporterFunc(ctx, session.NewGroup(req.Session), e.Attrs) if err != nil { return nil, errors.Wrapf(err, "failed to configure %v cache exporter", e.Type) @@ -545,9 +548,7 @@ func (c *Controller) Solve(ctx context.Context, req *controlapi.SolveRequest) (* if err != nil { return nil, err } - return &controlapi.SolveResponse{ - ExporterResponse: resp.ExporterResponse, - }, nil + return resp, nil } func (c *Controller) Status(req *controlapi.StatusRequest, stream controlapi.Control_StatusServer) error { diff --git a/exporter/containerimage/export.go b/exporter/containerimage/export.go index b5910e00b242..c0648b1c9c20 100644 --- a/exporter/containerimage/export.go +++ b/exporter/containerimage/export.go @@ -68,10 +68,9 @@ func New(opt Opt) (exporter.Exporter, error) { return im, nil } -func (e *imageExporter) Resolve(ctx context.Context, id int, opt map[string]string) (exporter.ExporterInstance, error) { +func (e *imageExporter) Resolve(ctx context.Context, opt map[string]string) (exporter.ExporterInstance, error) { i := &imageExporterInstance{ imageExporter: e, - id: id, attrs: opt, opts: ImageCommitOpts{ RefCfg: cacheconfig.RefConfig{ @@ -183,7 +182,6 @@ func (e *imageExporter) Resolve(ctx context.Context, id int, opt map[string]stri type imageExporterInstance struct { *imageExporter - id int attrs map[string]string opts ImageCommitOpts @@ -199,10 +197,6 @@ type imageExporterInstance struct { meta map[string][]byte } -func (e 
*imageExporterInstance) ID() int { - return e.id -} - func (e *imageExporterInstance) Name() string { return "exporting to image" } @@ -219,7 +213,7 @@ func (e *imageExporterInstance) Attrs() map[string]string { return e.attrs } -func (e *imageExporterInstance) Export(ctx context.Context, src *exporter.Source, buildInfo exporter.ExportBuildInfo) (_ map[string]string, descref exporter.DescriptorReference, err error) { +func (e *imageExporterInstance) Export(ctx context.Context, src *exporter.Source, buildInfo exporter.ExportBuildInfo, _ exporter.ExporterAPIs) (_ map[string]string, descref exporter.DescriptorReference, err error) { src = src.Clone() if src.Metadata == nil { src.Metadata = make(map[string][]byte) diff --git a/exporter/exporter.go b/exporter/exporter.go index b098657a2d74..95ace960d6f5 100644 --- a/exporter/exporter.go +++ b/exporter/exporter.go @@ -2,12 +2,15 @@ package exporter import ( "context" + "io" "github.com/moby/buildkit/cache" "github.com/moby/buildkit/exporter/containerimage/exptypes" + "github.com/moby/buildkit/session" "github.com/moby/buildkit/solver/result" "github.com/moby/buildkit/util/compression" ocispecs "github.com/opencontainers/image-spec/specs-go/v1" + "github.com/tonistiigi/fsutil" ) type Source = result.Result[cache.ImmutableRef] @@ -15,16 +18,15 @@ type Source = result.Result[cache.ImmutableRef] type Attestation = result.Attestation[cache.ImmutableRef] type Exporter interface { - Resolve(context.Context, int, map[string]string) (ExporterInstance, error) + Resolve(context.Context, map[string]string) (ExporterInstance, error) } type ExporterInstance interface { - ID() int Name() string Config() *Config Type() string Attrs() map[string]string - Export(ctx context.Context, src *Source, buildInfo ExportBuildInfo) (map[string]string, DescriptorReference, error) + Export(ctx context.Context, src *Source, buildInfo ExportBuildInfo, apis ExporterAPIs) (map[string]string, DescriptorReference, error) } type ExportBuildInfo struct { @@ 
-32,6 +34,11 @@ type ExportBuildInfo struct { InlineCache exptypes.InlineCache SessionID string } +// ExporterAPIs encapsulates the APIs for exporters that stream results to clients +type ExporterAPIs struct { + CopyFileWriter func(_ context.Context, md map[string]string, c session.Caller) (io.WriteCloser, error) + CopyToCaller func(_ context.Context, fs fsutil.FS, c session.Caller, progress func(int, bool)) error +} type DescriptorReference interface { Release() error diff --git a/exporter/exptypes/keys.go b/exporter/exptypes/keys.go index 4b568154fff1..03d8cc906253 100644 --- a/exporter/exptypes/keys.go +++ b/exporter/exptypes/keys.go @@ -7,7 +7,7 @@ const ( type ExporterOptKey string // Options keys supported by all exporters. -var ( +const ( // Clamp produced timestamps. For more information see the // SOURCE_DATE_EPOCH specification. // Value: int (number of seconds since Unix epoch) diff --git a/exporter/local/export.go b/exporter/local/export.go index cd3ef3b9d39b..ac9bdec2b19b 100644 --- a/exporter/local/export.go +++ b/exporter/local/export.go @@ -13,7 +13,6 @@ import ( "github.com/moby/buildkit/exporter/containerimage/exptypes" "github.com/moby/buildkit/exporter/util/epoch" "github.com/moby/buildkit/session" - "github.com/moby/buildkit/session/filesync" "github.com/moby/buildkit/util/progress" "github.com/pkg/errors" "github.com/tonistiigi/fsutil" @@ -36,9 +35,8 @@ func New(opt Opt) (exporter.Exporter, error) { return le, nil } -func (e *localExporter) Resolve(ctx context.Context, id int, opt map[string]string) (exporter.ExporterInstance, error) { +func (e *localExporter) Resolve(ctx context.Context, opt map[string]string) (exporter.ExporterInstance, error) { i := &localExporterInstance{ - id: id, attrs: opt, localExporter: e, } @@ -52,16 +50,11 @@ func (e *localExporter) Resolve(ctx context.Context, id int, opt map[string]stri type localExporterInstance struct { *localExporter - id int attrs map[string]string opts CreateFSOpts } -func (e 
*localExporterInstance) ID() int { - return e.id -} - func (e *localExporterInstance) Name() string { return "exporting to client directory" } @@ -78,7 +71,7 @@ func (e *localExporter) Config() *exporter.Config { return exporter.NewConfig() } -func (e *localExporterInstance) Export(ctx context.Context, inp *exporter.Source, buildInfo exporter.ExportBuildInfo) (map[string]string, exporter.DescriptorReference, error) { +func (e *localExporterInstance) Export(ctx context.Context, inp *exporter.Source, buildInfo exporter.ExportBuildInfo, apis exporter.ExporterAPIs) (map[string]string, exporter.DescriptorReference, error) { timeoutCtx, cancel := context.WithCancelCause(ctx) timeoutCtx, _ = context.WithTimeoutCause(timeoutCtx, 5*time.Second, errors.WithStack(context.DeadlineExceeded)) //nolint:govet defer func() { cancel(errors.WithStack(context.Canceled)) }() @@ -162,7 +155,7 @@ func (e *localExporterInstance) Export(ctx context.Context, inp *exporter.Source } progress := NewProgressHandler(ctx, lbl) - if err := filesync.CopyToCaller(ctx, outputFS, e.id, caller, progress); err != nil { + if err := apis.CopyToCaller(ctx, outputFS, caller, progress); err != nil { return err } return nil diff --git a/exporter/oci/export.go b/exporter/oci/export.go index 17e7c41571f8..b1136fe26355 100644 --- a/exporter/oci/export.go +++ b/exporter/oci/export.go @@ -21,7 +21,6 @@ import ( "github.com/moby/buildkit/exporter/containerimage/exptypes" "github.com/moby/buildkit/session" sessioncontent "github.com/moby/buildkit/session/content" - "github.com/moby/buildkit/session/filesync" "github.com/moby/buildkit/util/compression" "github.com/moby/buildkit/util/contentutil" "github.com/moby/buildkit/util/grpcerrors" @@ -60,10 +59,9 @@ func New(opt Opt) (exporter.Exporter, error) { return im, nil } -func (e *imageExporter) Resolve(ctx context.Context, id int, opt map[string]string) (exporter.ExporterInstance, error) { +func (e *imageExporter) Resolve(ctx context.Context, opt map[string]string) 
(exporter.ExporterInstance, error) { i := &imageExporterInstance{ imageExporter: e, - id: id, attrs: opt, tar: true, opts: containerimage.ImageCommitOpts{ @@ -103,7 +101,6 @@ func (e *imageExporter) Resolve(ctx context.Context, id int, opt map[string]stri type imageExporterInstance struct { *imageExporter - id int attrs map[string]string opts containerimage.ImageCommitOpts @@ -111,10 +108,6 @@ type imageExporterInstance struct { meta map[string][]byte } -func (e *imageExporterInstance) ID() int { - return e.id -} - func (e *imageExporterInstance) Name() string { return fmt.Sprintf("exporting to %s image format", e.opt.Variant) } @@ -131,7 +124,7 @@ func (e *imageExporterInstance) Config() *exporter.Config { return exporter.NewConfigWithCompression(e.opts.RefCfg.Compression) } -func (e *imageExporterInstance) Export(ctx context.Context, src *exporter.Source, buildInfo exporter.ExportBuildInfo) (_ map[string]string, descref exporter.DescriptorReference, err error) { +func (e *imageExporterInstance) Export(ctx context.Context, src *exporter.Source, buildInfo exporter.ExportBuildInfo, apis exporter.ExporterAPIs) (_ map[string]string, descref exporter.DescriptorReference, err error) { if e.opt.Variant == VariantDocker && len(src.Refs) > 0 { return nil, nil, errors.Errorf("docker exporter does not currently support exporting manifest lists") } @@ -260,7 +253,7 @@ func (e *imageExporterInstance) Export(ctx context.Context, src *exporter.Source } if e.tar { - w, err := filesync.CopyFileWriter(ctx, resp, e.id, caller) + w, err := apis.CopyFileWriter(ctx, resp, caller) if err != nil { return nil, nil, err } diff --git a/exporter/tar/export.go b/exporter/tar/export.go index 23b75b682981..4d2315a950b6 100644 --- a/exporter/tar/export.go +++ b/exporter/tar/export.go @@ -13,7 +13,6 @@ import ( "github.com/moby/buildkit/exporter/local" "github.com/moby/buildkit/exporter/util/epoch" "github.com/moby/buildkit/session" - "github.com/moby/buildkit/session/filesync" 
"github.com/moby/buildkit/util/progress" "github.com/pkg/errors" "github.com/tonistiigi/fsutil" @@ -34,33 +33,26 @@ func New(opt Opt) (exporter.Exporter, error) { return le, nil } -func (e *localExporter) Resolve(ctx context.Context, id int, opt map[string]string) (exporter.ExporterInstance, error) { +func (e *localExporter) Resolve(ctx context.Context, opt map[string]string) (exporter.ExporterInstance, error) { li := &localExporterInstance{ localExporter: e, - id: id, attrs: opt, } _, err := li.opts.Load(opt) if err != nil { return nil, err } - _ = opt return li, nil } type localExporterInstance struct { *localExporter - id int attrs map[string]string opts local.CreateFSOpts } -func (e *localExporterInstance) ID() int { - return e.id -} - func (e *localExporterInstance) Name() string { return "exporting to client tarball" } @@ -77,7 +69,7 @@ func (e *localExporterInstance) Config() *exporter.Config { return exporter.NewConfig() } -func (e *localExporterInstance) Export(ctx context.Context, inp *exporter.Source, buildInfo exporter.ExportBuildInfo) (map[string]string, exporter.DescriptorReference, error) { +func (e *localExporterInstance) Export(ctx context.Context, inp *exporter.Source, buildInfo exporter.ExportBuildInfo, apis exporter.ExporterAPIs) (map[string]string, exporter.DescriptorReference, error) { var defers []func() error defer func() { @@ -172,7 +164,7 @@ func (e *localExporterInstance) Export(ctx context.Context, inp *exporter.Source return nil, nil, err } - w, err := filesync.CopyFileWriter(ctx, nil, e.id, caller) + w, err := apis.CopyFileWriter(ctx, nil, caller) if err != nil { return nil, nil, err } diff --git a/session/filesync/filesync.go b/session/filesync/filesync.go index 230493f1690f..1aec4244c930 100644 --- a/session/filesync/filesync.go +++ b/session/filesync/filesync.go @@ -252,7 +252,7 @@ type FSSyncTarget interface { } type fsSyncTarget struct { - id int + id string outdir string f FileOutputFunc } @@ -261,14 +261,14 @@ func (target 
*fsSyncTarget) target() *fsSyncTarget { return target } -func WithFSSync(id int, f FileOutputFunc) FSSyncTarget { +func WithFSSync(id string, f FileOutputFunc) FSSyncTarget { return &fsSyncTarget{ id: id, f: f, } } -func WithFSSyncDir(id int, outdir string) FSSyncTarget { +func WithFSSyncDir(id, outdir string) FSSyncTarget { return &fsSyncTarget{ id: id, outdir: outdir, @@ -277,16 +277,18 @@ func WithFSSyncDir(id int, outdir string) FSSyncTarget { func NewFSSyncTarget(targets ...FSSyncTarget) *SyncTarget { st := &SyncTarget{ - fs: make(map[int]FileOutputFunc), - outdirs: make(map[int]string), + fs: make(map[string]FileOutputFunc), + outdirs: make(map[string]string), } st.Add(targets...) return st } type SyncTarget struct { - fs map[int]FileOutputFunc - outdirs map[int]string + // maps exporter id -> file output handler + fs map[string]FileOutputFunc + // maps exporter id -> output directory + outdirs map[string]string } var _ session.Attachable = &SyncTarget{} @@ -307,20 +309,16 @@ func (sp *SyncTarget) Register(server *grpc.Server) { RegisterFileSendServer(server, sp) } -func (sp *SyncTarget) chooser(ctx context.Context) int { +func (sp *SyncTarget) chooser(ctx context.Context) string { md, ok := metadata.FromIncomingContext(ctx) if !ok { - return 0 + return "" } values := md[keyExporterID] if len(values) == 0 { - return 0 + return "" } - id, err := strconv.ParseInt(values[0], 10, 64) - if err != nil { - return 0 - } - return int(id) + return values[0] } func (sp *SyncTarget) DiffCopy(stream FileSend_DiffCopyServer) (err error) { @@ -330,7 +328,7 @@ func (sp *SyncTarget) DiffCopy(stream FileSend_DiffCopyServer) (err error) { } f, ok := sp.fs[id] if !ok { - return errors.Errorf("exporter %d not found", id) + return errors.Errorf("exporter %s not found", id) } opts, _ := metadata.FromIncomingContext(stream.Context()) // if no metadata continue with empty object @@ -356,7 +354,7 @@ func (sp *SyncTarget) DiffCopy(stream FileSend_DiffCopyServer) (err error) { return 
writeTargetFile(stream, wc) } -func CopyToCaller(ctx context.Context, fs fsutil.FS, id int, c session.Caller, progress func(int, bool)) error { +func CopyToCaller(ctx context.Context, fs fsutil.FS, id string, c session.Caller, progress func(int, bool)) error { method := session.MethodURL(FileSend_ServiceDesc.ServiceName, "diffcopy") if !c.Supports(method) { return errors.Errorf("method %s not supported by the client", method) @@ -371,7 +369,7 @@ func CopyToCaller(ctx context.Context, fs fsutil.FS, id int, c session.Caller, p if existingVal, ok := opts[keyExporterID]; ok { bklog.G(ctx).Warnf("overwriting grpc metadata key %q from value %+v to %+v", keyExporterID, existingVal, id) } - opts[keyExporterID] = []string{fmt.Sprint(id)} + opts[keyExporterID] = []string{id} ctx = metadata.NewOutgoingContext(ctx, opts) cc, err := client.DiffCopy(ctx) @@ -382,7 +380,7 @@ func CopyToCaller(ctx context.Context, fs fsutil.FS, id int, c session.Caller, p return sendDiffCopy(cc, fs, progress) } -func CopyFileWriter(ctx context.Context, md map[string]string, id int, c session.Caller) (io.WriteCloser, error) { +func CopyFileWriter(ctx context.Context, md map[string]string, id string, c session.Caller) (io.WriteCloser, error) { method := session.MethodURL(FileSend_ServiceDesc.ServiceName, "diffcopy") if !c.Supports(method) { return nil, errors.Errorf("method %s not supported by the client", method) @@ -404,7 +402,7 @@ func CopyFileWriter(ctx context.Context, md map[string]string, id int, c session if existingVal, ok := opts[keyExporterID]; ok { bklog.G(ctx).Warnf("overwriting grpc metadata key %q from value %+v to %+v", keyExporterID, existingVal, id) } - opts[keyExporterID] = []string{fmt.Sprint(id)} + opts[keyExporterID] = []string{id} ctx = metadata.NewOutgoingContext(ctx, opts) cc, err := client.DiffCopy(ctx) diff --git a/solver/llbsolver/solver.go b/solver/llbsolver/solver.go index 09189bf62481..6ca4e283e4d8 100644 --- a/solver/llbsolver/solver.go +++ 
b/solver/llbsolver/solver.go @@ -4,6 +4,7 @@ import ( "context" "encoding/json" "fmt" + "io" "maps" "os" "strconv" @@ -30,6 +31,7 @@ import ( "github.com/moby/buildkit/identity" "github.com/moby/buildkit/session" sessionexporter "github.com/moby/buildkit/session/exporter" + "github.com/moby/buildkit/session/filesync" "github.com/moby/buildkit/solver" "github.com/moby/buildkit/solver/llbsolver/provenance" provenancetypes "github.com/moby/buildkit/solver/llbsolver/provenance/types" @@ -46,6 +48,7 @@ import ( "github.com/moby/buildkit/worker" digest "github.com/opencontainers/go-digest" "github.com/pkg/errors" + "github.com/tonistiigi/fsutil" "golang.org/x/sync/errgroup" "google.golang.org/grpc/codes" "google.golang.org/protobuf/types/known/timestamppb" @@ -58,15 +61,26 @@ const ( ) type ExporterRequest struct { - Exporters []exporter.ExporterInstance + Exporters []Exporter CacheExporters []RemoteCacheExporter EnableSessionExporter bool } +type Exporter struct { + exporter.ExporterAPIs + exporter.ExporterInstance + + // ID identifies the exporter + ID string +} + type RemoteCacheExporter struct { remotecache.Exporter solver.CacheExportMode IgnoreError bool + + // ID identifies the exporter + ID string } // ResolveWorkerFunc returns default worker for the temporary default non-distributed use cases @@ -178,7 +192,7 @@ func (s *Solver) Bridge(b solver.Builder) frontend.FrontendLLBBridge { return s.bridge(b) } -func (s *Solver) recordBuildHistory(ctx context.Context, id string, req frontend.SolveRequest, exp ExporterRequest, j *solver.Job, usage *resources.SysSampler) (func(context.Context, *Result, []exporter.DescriptorReference, error) error, error) { +func (s *Solver) recordBuildHistory(ctx context.Context, id string, req frontend.SolveRequest, exps []Exporter, j *solver.Job, usage *resources.SysSampler) (func(context.Context, *Result, []exporter.DescriptorReference, error) error, error) { stopTrace, err := detect.Recorder.Record(ctx) if err != nil { return nil, 
errdefs.Internal(err) @@ -191,7 +205,7 @@ func (s *Solver) recordBuildHistory(ctx context.Context, id string, req frontend CreatedAt: timestamppb.Now(), } - for _, e := range exp.Exporters { + for _, e := range exps { rec.Exporters = append(rec.Exporters, &controlapi.Exporter{ Type: e.Type(), Attrs: e.Attrs(), @@ -490,7 +504,7 @@ func (s *Solver) recordBuildHistory(ctx context.Context, id string, req frontend }, nil } -func (s *Solver) Solve(ctx context.Context, id string, sessionID string, req frontend.SolveRequest, exp ExporterRequest, ent []entitlements.Entitlement, post []Processor, internal bool, srcPol *spb.Policy, policySession string) (_ *client.SolveResponse, err error) { +func (s *Solver) Solve(ctx context.Context, id string, sessionID string, req frontend.SolveRequest, exp ExporterRequest, ent []entitlements.Entitlement, post []Processor, internal bool, srcPol *spb.Policy, policySession string) (_ *controlapi.SolveResponse, err error) { j, err := s.solver.NewJob(id) if err != nil { return nil, err @@ -558,7 +572,7 @@ func (s *Solver) Solve(ctx context.Context, id string, sessionID string, req fro } if !internal { - rec, err1 := s.recordBuildHistory(ctx, id, req, exp, j, usage) + rec, err1 := s.recordBuildHistory(ctx, id, req, exp.Exporters, j, usage) if err1 != nil { defer j.CloseProgress() return nil, err1 @@ -670,38 +684,44 @@ func (s *Solver) Solve(ctx context.Context, id string, sessionID string, req fro exp.Exporters = append(exp.Exporters, exporters...) 
} - var exporterResponse map[string]string - exporterResponse, descrefs, err = s.runExporters(ctx, id, exp.Exporters, inlineCacheExporter, j, cached, inp) + var exporterResponses []*controlapi.ExporterResponse + exporterResponses, descrefs, err = s.runExporters(ctx, id, exp.Exporters, inlineCacheExporter, j, cached, inp) if err != nil { return nil, err } - cacheExporterResponse, err := runCacheExporters(ctx, cacheExporters, j, cached, inp) + cacheExporterResponses, err := runCacheExporters(ctx, cacheExporters, j, cached, inp) if err != nil { return nil, err } - if exporterResponse == nil { - exporterResponse = make(map[string]string) + exporterResponse := &controlapi.SolveResponse{ + ExporterResponseDeprecated: make(map[string]string), + ExporterResponses: exporterResponses, + CacheExporterResponses: cacheExporterResponses, } for k, v := range res.Metadata { if strings.HasPrefix(k, "frontend.") { - exporterResponse[k] = string(v) + exporterResponse.ExporterResponseDeprecated[k] = string(v) } } - for k, v := range cacheExporterResponse { - if strings.HasPrefix(k, "cache.") { - exporterResponse[k] = v + + for _, resp := range exporterResponses { + maps.Copy(exporterResponse.ExporterResponseDeprecated, resp.Data) + } + for _, resp := range cacheExporterResponses { + for k, v := range resp.Data { + if strings.HasPrefix(k, "cache.") { + exporterResponse.ExporterResponseDeprecated[k] = v + } } } - return &client.SolveResponse{ - ExporterResponse: exporterResponse, - }, nil + return exporterResponse, nil } -func (s *Solver) getSessionExporters(ctx context.Context, sessionID string, id int, inp *exporter.Source) ([]exporter.ExporterInstance, error) { +func (s *Solver) getSessionExporters(ctx context.Context, sessionID string, id int, inp *exporter.Source) ([]Exporter, error) { timeoutCtx, cancel := context.WithCancelCause(ctx) timeoutCtx, _ = context.WithTimeoutCause(timeoutCtx, 5*time.Second, errors.WithStack(context.DeadlineExceeded)) //nolint:govet defer func() { 
cancel(errors.WithStack(context.Canceled)) }() @@ -739,17 +759,22 @@ func (s *Solver) getSessionExporters(ctx context.Context, sessionID string, id i return nil, err } - var out []exporter.ExporterInstance + var out []Exporter for i, req := range res.Exporters { exp, err := w.Exporter(req.Type, s.sm) if err != nil { return nil, err } - expi, err := exp.Resolve(ctx, id+i, req.Attrs) + expi, err := exp.Resolve(ctx, req.Attrs) if err != nil { return nil, err } - out = append(out, expi) + id := fmt.Sprint(id + i) + out = append(out, Exporter{ + ExporterInstance: expi, + ExporterAPIs: NewExporterAPIs(id), + ID: id, + }) } return out, nil } @@ -771,11 +796,10 @@ func validateSourcePolicy(pol *spb.Policy) error { return nil } -func runCacheExporters(ctx context.Context, exporters []RemoteCacheExporter, j *solver.Job, cached *result.Result[solver.CachedResult], inp *result.Result[cache.ImmutableRef]) (map[string]string, error) { +func runCacheExporters(ctx context.Context, exporters []RemoteCacheExporter, j *solver.Job, cached *result.Result[solver.CachedResult], inp *result.Result[cache.ImmutableRef]) (exporterResponses []*controlapi.ExporterResponse, err error) { eg, ctx := errgroup.WithContext(ctx) g := session.NewGroup(j.SessionID) - var cacheExporterResponse map[string]string - resps := make([]map[string]string, len(exporters)) + exporterResponses = make([]*controlapi.ExporterResponse, len(exporters)) for i, exp := range exporters { i, exp := i, exp eg.Go(func() (err error) { @@ -799,7 +823,11 @@ func runCacheExporters(ctx context.Context, exporters []RemoteCacheExporter, j * }); err != nil { return prepareDone(err) } - resps[i], err = exp.Finalize(ctx) + resp, err := exp.Finalize(ctx) + exporterResponses[i] = &controlapi.ExporterResponse{ + Metadata: &controlapi.ExporterMetadata{ID: exp.ID}, + Data: resp, + } return prepareDone(err) }) if exp.IgnoreError { @@ -812,15 +840,7 @@ func runCacheExporters(ctx context.Context, exporters []RemoteCacheExporter, j * return 
nil, err } - // TODO: separate these out, and return multiple cache exporter responses - // to the client - for _, resp := range resps { - if cacheExporterResponse == nil { - cacheExporterResponse = make(map[string]string) - } - maps.Copy(cacheExporterResponse, resp) - } - return cacheExporterResponse, nil + return exporterResponses, nil } func runInlineCacheExporter(ctx context.Context, e exporter.ExporterInstance, inlineExporter inlineCacheExporter, j *solver.Job, cached *result.Result[solver.CachedResult]) (*result.Result[*exptypes.InlineCacheEntry], error) { @@ -842,14 +862,14 @@ func runInlineCacheExporter(ctx context.Context, e exporter.ExporterInstance, in return res, done(err) } -func (s *Solver) runExporters(ctx context.Context, ref string, exporters []exporter.ExporterInstance, inlineCacheExporter inlineCacheExporter, job *solver.Job, cached *result.Result[solver.CachedResult], inp *exporter.Source) (exporterResponse map[string]string, descrefs []exporter.DescriptorReference, err error) { +func (s *Solver) runExporters(ctx context.Context, ref string, exporters []Exporter, inlineCacheExporter inlineCacheExporter, job *solver.Job, cached *result.Result[solver.CachedResult], inp *exporter.Source) (exporterResponses []*controlapi.ExporterResponse, descrefs []exporter.DescriptorReference, err error) { warnings, err := verifier.CheckInvalidPlatforms(ctx, inp) if err != nil { return nil, nil, err } eg, ctx := errgroup.WithContext(ctx) - resps := make([]map[string]string, len(exporters)) + exporterResponses = make([]*controlapi.ExporterResponse, len(exporters)) descs := make([]exporter.DescriptorReference, len(exporters)) var inlineCacheMu sync.Mutex for i, exp := range exporters { @@ -875,14 +895,19 @@ func (s *Solver) runExporters(ctx context.Context, ref string, exporters []expor return runInlineCacheExporter(ctx, exp, inlineCacheExporter, job, cached) }) - resps[i], descs[i], err = exp.Export(ctx, inp, exporter.ExportBuildInfo{ + var md map[string]string + 
md, descs[i], err = exp.Export(ctx, inp, exporter.ExportBuildInfo{ Ref: ref, SessionID: job.SessionID, InlineCache: inlineCache, - }) + }, exp.ExporterAPIs) if err != nil { return err } + exporterResponses[i] = &controlapi.ExporterResponse{ + Metadata: &controlapi.ExporterMetadata{ID: exp.ID}, + Data: md, + } return nil }) }) @@ -904,18 +929,7 @@ } } - // TODO: separate these out, and return multiple exporter responses to the - // client - for _, resp := range resps { - for k, v := range resp { - if exporterResponse == nil { - exporterResponse = make(map[string]string) - } - exporterResponse[k] = v - } - } - - return exporterResponse, descs, nil + return exporterResponses, descs, nil } func (s *Solver) leaseManager() (*leaseutil.Manager, error) { @@ -1274,3 +1288,28 @@ } return session, nil } + +// NewExporterAPIs creates a new exporter API set bound to the +// specified exporter id. The id is forwarded to the filesync +// session helpers (CopyToCaller, CopyFileWriter) so that file +// transfers can be attributed to this exporter on the client side. +// The id should be treated by the exporter as opaque. +func NewExporterAPIs(id string) exporter.ExporterAPIs { + apis := exporterAPIs{exporterID: id} + return exporter.ExporterAPIs{ + CopyToCaller: apis.CopyToCaller, + CopyFileWriter: apis.CopyFileWriter, + } +} + +func (r exporterAPIs) CopyToCaller(ctx context.Context, fs fsutil.FS, c session.Caller, progress func(int, bool)) error { + return filesync.CopyToCaller(ctx, fs, r.exporterID, c, progress) +} + +func (r exporterAPIs) CopyFileWriter(ctx context.Context, md map[string]string, c session.Caller) (io.WriteCloser, error) { + return filesync.CopyFileWriter(ctx, md, r.exporterID, c) +} + +type exporterAPIs struct { + exporterID string +}