@@ -293,25 +293,27 @@ object BulkCopyUtils extends Logging {
         val autoCols = getAutoCols(conn, dbtable)

         val columnsToWriteSet = columnsToWrite.split(",").toSet
+        logDebug(s"columnsToWrite: $columnsToWriteSet")

         val prefix = "Spark Dataframe and SQL Server table have differing"

         // auto columns should not exist in df
         assertIfCheckEnabled(dfCols.length + autoCols.length == tableCols.length, strictSchemaCheck,
             s"${prefix} numbers of columns")

-        if (columnsToWriteSet.isEmpty()) {
-            val result = new Array[ColumnMetadata](tableCols.length - autoCols.length)
-        } else {
+        // if columnsToWrite provided by user, use it for metadata mapping. If not, use sql table.
+        if (columnsToWrite != "") {
             val result = new Array[ColumnMetadata](columnsToWriteSet.size)
+        } else {
+            val result = new Array[ColumnMetadata](tableCols.length - autoCols.length)
         }

         var nonAutoColIndex = 0

         for (i <- 0 to tableCols.length - 1) {
             val tableColName = tableCols(i).name
             var dfFieldIndex = -1
-            if (!columnsToWriteSet.isEmpty() && !columnsToWriteSet.contains(tableColName)) {
+            if (!columnsToWriteSet.isEmpty && !columnsToWriteSet.contains(tableColName)) {
                 // if columnsToWrite provided, and column name not in it, skip column mapping and ColumnMetadata
                 logDebug(s"skipping col index $i col name $tableColName, user not provided in columnsToWrite list")
             } else if (autoCols.contains(tableColName)) {
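
Reviewer note: `columnsToWrite.split(",").toSet` can never be an empty set, because splitting an empty string yields a single empty-string element; that is why the sizing branch above tests the raw `columnsToWrite` string rather than `columnsToWriteSet.isEmpty`. The change from `isEmpty()` to `isEmpty` is also required to compile, since Scala collections declare `isEmpty` without a parameter list. A minimal, self-contained sketch of the split behavior (illustration only, not part of the PR):

object SplitCheck extends App {
  // Splitting an empty string does NOT produce an empty collection:
  val noCols = "".split(",").toSet
  println(noCols.size)      // 1 -- the set holds a single empty string
  println(noCols.isEmpty)   // false, so the set cannot signal "not provided"

  // A user-provided list splits as expected:
  val someCols = "id,name".split(",").toSet
  println(someCols.size)             // 2
  println(someCols.contains("id"))   // true
}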
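
Separately, each branch in the hunk declares its own `val result`, which is scoped to that brace block and not visible after the closing `}`; presumably code outside the hunk reads `result` later. A common restructuring is to compute the length in the conditional and allocate once. A hypothetical, self-contained sketch with stand-in values for the identifiers in the hunk:

object MetadataLenDemo extends App {
  val columnsToWrite = "id,name"                          // stand-in user input
  val columnsToWriteSet = columnsToWrite.split(",").toSet
  val tableColCount = 5                                   // stand-in for tableCols.length
  val autoColCount = 1                                    // stand-in for autoCols.length

  // Compute the size first, then allocate once, so the array
  // remains in scope after the branch.
  val metadataLen =
    if (columnsToWrite != "") columnsToWriteSet.size
    else tableColCount - autoColCount
  val result = new Array[AnyRef](metadataLen)             // AnyRef stands in for ColumnMetadata
  println(result.length)                                  // 2
}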