8 changes: 4 additions & 4 deletions Sources/SparkConnect/ArrowData.swift
@@ -39,19 +39,19 @@ public class ArrowData {
) throws {
let infoType = arrowType.info
switch infoType {
- case let .primitiveInfo(typeId):
+ case .primitiveInfo(let typeId):
if typeId == ArrowTypeId.unknown {
throw ArrowError.unknownType("Unknown primitive type for data")
}
- case let .variableInfo(typeId):
+ case .variableInfo(let typeId):
if typeId == ArrowTypeId.unknown {
throw ArrowError.unknownType("Unknown variable type for data")
}
- case let .timeInfo(typeId):
+ case .timeInfo(let typeId):
if typeId == ArrowTypeId.unknown {
throw ArrowError.unknownType("Unknown time type for data")
}
- case let .complexInfo(typeId):
+ case .complexInfo(let typeId):
if typeId == ArrowTypeId.unknown {
throw ArrowError.unknownType("Unknown complex type for data")
}
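Note on the ArrowData.swift hunk: this is a purely stylistic change. Both spellings bind the associated value; the PR just moves `let` next to the bound name. A minimal standalone sketch (the enum and values are illustrative, not from the codebase):

```swift
// Illustrative enum, not the real Arrow type-info enum.
enum TypeInfoSketch {
  case primitiveInfo(Int)
}

let info = TypeInfoSketch.primitiveInfo(7)

// Old spelling: `let` applies to the whole pattern.
switch info {
case let .primitiveInfo(typeId):
  print("bound", typeId)
}

// New spelling used by the PR: `let` sits on the bound name itself.
switch info {
case .primitiveInfo(let typeId):
  print("bound", typeId)
}
```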
13 changes: 7 additions & 6 deletions Sources/SparkConnect/Extension.swift
@@ -182,14 +182,15 @@ extension String {
}
}

- var toDatasetType: DatasetType {
+ var toOutputType: OutputType {
let mode =
switch self {
case "unspecified": DatasetType.unspecified
case "materializedView": DatasetType.materializedView
case "table": DatasetType.table
case "temporaryView": DatasetType.temporaryView
default: DatasetType.UNRECOGNIZED(-1)
case "unspecified": OutputType.unspecified
case "materializedView": OutputType.materializedView
case "table": OutputType.table
case "temporaryView": OutputType.temporaryView
case "sink": OutputType.sink
default: OutputType.UNRECOGNIZED(-1)
}
return mode
}
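The `toOutputType` hunk maps the user-facing string onto the generated protobuf enum and picks up the new `sink` case; anything else still falls through to `UNRECOGNIZED(-1)`. A self-contained sketch of the same mapping pattern, using a local stand-in enum instead of the generated `Spark_Connect_OutputType` (names below are illustrative):

```swift
// Stand-in for the generated Spark_Connect_OutputType (illustrative only).
enum OutputTypeSketch: Equatable {
  case unspecified, materializedView, table, temporaryView, sink
  case UNRECOGNIZED(Int)
}

extension String {
  // Mirrors the shape of the PR's `toOutputType` computed property.
  var toOutputTypeSketch: OutputTypeSketch {
    switch self {
    case "unspecified": .unspecified
    case "materializedView": .materializedView
    case "table": .table
    case "temporaryView": .temporaryView
    case "sink": .sink
    default: .UNRECOGNIZED(-1)
    }
  }
}

// "table" maps to .table; unknown strings map to UNRECOGNIZED(-1).
assert("table".toOutputTypeSketch == .table)
assert("bogus".toOutputTypeSketch == .UNRECOGNIZED(-1))
```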
24 changes: 12 additions & 12 deletions Sources/SparkConnect/SparkConnectClient.swift
@@ -145,8 +145,8 @@ public actor SparkConnectClient {
throw SparkConnectError.InvalidViewName
case let m where m.contains("DATA_SOURCE_NOT_FOUND"):
throw SparkConnectError.DataSourceNotFound
case let m where m.contains("DATASET_TYPE_UNSPECIFIED"):
throw SparkConnectError.DatasetTypeUnspecified
case let m where m.contains("OUTPUT_TYPE_UNSPECIFIED"):
throw SparkConnectError.OutputTypeUnspecified
default:
throw error
}
@@ -1240,27 +1240,27 @@ public actor SparkConnectClient {
}

@discardableResult
- func defineDataset(
+ func defineOutput(
_ dataflowGraphID: String,
- _ datasetName: String,
- _ datasetType: String,
+ _ outputName: String,
+ _ outputType: String,
_ comment: String? = nil
) async throws -> Bool {
try await withGPRC { client in
if UUID(uuidString: dataflowGraphID) == nil {
throw SparkConnectError.InvalidArgument
}

- var defineDataset = Spark_Connect_PipelineCommand.DefineDataset()
- defineDataset.dataflowGraphID = dataflowGraphID
- defineDataset.datasetName = datasetName
- defineDataset.datasetType = datasetType.toDatasetType
+ var defineOutput = Spark_Connect_PipelineCommand.DefineOutput()
+ defineOutput.dataflowGraphID = dataflowGraphID
+ defineOutput.outputName = outputName
+ defineOutput.outputType = outputType.toOutputType
if let comment {
- defineDataset.comment = comment
+ defineOutput.comment = comment
}

var pipelineCommand = Spark_Connect_PipelineCommand()
- pipelineCommand.commandType = .defineDataset(defineDataset)
+ pipelineCommand.commandType = .defineOutput(defineOutput)

var command = Spark_Connect_Command()
command.commandType = .pipelineCommand(pipelineCommand)
@@ -1288,7 +1288,7 @@ public actor SparkConnectClient {
defineFlow.dataflowGraphID = dataflowGraphID
defineFlow.flowName = flowName
defineFlow.targetDatasetName = targetDatasetName
- defineFlow.relation = relation
+ // defineFlow.relation = relation

var pipelineCommand = Spark_Connect_PipelineCommand()
pipelineCommand.commandType = .defineFlow(defineFlow)
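With the rename, the client now defines a pipeline output rather than a dataset: it validates the graph ID as a UUID, fills a `DefineOutput` pipeline command, and sends it over gRPC. A hedged call-site sketch, assuming an already-initialized `SparkConnectClient` and a graph ID obtained earlier in the session (the wrapper function, names, and values are illustrative, not from the PR):

```swift
// Hypothetical call site; `client` and `graphID` are assumed to exist already.
func defineSalesOutput(_ client: SparkConnectClient, _ graphID: String) async throws {
  // A non-UUID graph ID is rejected with SparkConnectError.InvalidArgument
  // before any RPC is sent; the trailing comment argument is optional.
  try await client.defineOutput(graphID, "sales_summary", "materializedView", "Daily sales rollup")
}
```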
2 changes: 1 addition & 1 deletion Sources/SparkConnect/SparkConnectError.swift
@@ -22,11 +22,11 @@ public enum SparkConnectError: Error {
case CatalogNotFound
case ColumnNotFound
case DataSourceNotFound
- case DatasetTypeUnspecified
case InvalidArgument
case InvalidSessionID
case InvalidType
case InvalidViewName
+ case OutputTypeUnspecified
case ParseSyntaxError
case SchemaNotFound
case SessionClosed
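Callers that matched on the old error case need the corresponding rename: the client now surfaces the server's `OUTPUT_TYPE_UNSPECIFIED` condition as `SparkConnectError.OutputTypeUnspecified`. A hedged handling sketch (the wrapper function below is illustrative):

```swift
// Hypothetical wrapper showing how a caller might handle the renamed case.
func defineOrReport(_ client: SparkConnectClient, _ graphID: String,
                    _ name: String, _ type: String) async {
  do {
    try await client.defineOutput(graphID, name, type)
  } catch SparkConnectError.OutputTypeUnspecified {
    print("Output type must be specified explicitly for \(name).")
  } catch {
    print("Unexpected error: \(error)")
  }
}
```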
2 changes: 1 addition & 1 deletion Sources/SparkConnect/TypeAliases.swift
@@ -23,7 +23,6 @@ typealias AnalyzePlanResponse = Spark_Connect_AnalyzePlanResponse
typealias Command = Spark_Connect_Command
typealias ConfigRequest = Spark_Connect_ConfigRequest
typealias DataSource = Spark_Connect_Read.DataSource
- typealias DatasetType = Spark_Connect_DatasetType
typealias DataType = Spark_Connect_DataType
typealias DayTimeInterval = Spark_Connect_DataType.DayTimeInterval
typealias Drop = Spark_Connect_Drop
@@ -45,6 +44,7 @@ typealias MergeIntoTableCommand = Spark_Connect_MergeIntoTableCommand
typealias NamedTable = Spark_Connect_Read.NamedTable
typealias OneOf_Analyze = AnalyzePlanRequest.OneOf_Analyze
typealias OneOf_CatType = Spark_Connect_Catalog.OneOf_CatType
+ typealias OutputType = Spark_Connect_OutputType
typealias Plan = Spark_Connect_Plan
typealias Project = Spark_Connect_Project
typealias Range = Spark_Connect_Range