diff --git a/omnioperator/omniop-spark-extension/cpp/src/jni/OrcColumnarBatchJniReader.cpp b/omnioperator/omniop-spark-extension/cpp/src/jni/OrcColumnarBatchJniReader.cpp index 3b38f0deae9ebf178e2878cb4e69b8d54ec7e902..3a096699a05900a83b09bfa0ddca5a7be1fdc4ea 100644 --- a/omnioperator/omniop-spark-extension/cpp/src/jni/OrcColumnarBatchJniReader.cpp +++ b/omnioperator/omniop-spark-extension/cpp/src/jni/OrcColumnarBatchJniReader.cpp @@ -382,30 +382,27 @@ int copyToOminVec(int maxLen, int vcType, int &ominTypeId, uint64_t &ominVecId, return 1; } -int copyToOminDecimalVec(int vcType, int &ominTypeId, uint64_t &ominVecId, orc::ColumnVectorBatch *field) +int copyToOmniDecimalVec(int precision, int &ominTypeId, uint64_t &ominVecId, orc::ColumnVectorBatch *field) { VectorAllocator *allocator = VectorAllocator::GetGlobalAllocator(); - if (vcType > 18) { + if (precision > 18) { ominTypeId = static_cast<int>(OMNI_DECIMAL128); orc::Decimal128VectorBatch *lvb = dynamic_cast<orc::Decimal128VectorBatch *>(field); FixedWidthVector<OMNI_DECIMAL128, Decimal128> *originalVector = new FixedWidthVector<OMNI_DECIMAL128, Decimal128>(allocator, lvb->numElements); for (int i = 0; i < lvb->numElements; i++) { if (lvb->notNull.data()[i]) { - bool wasNegative = false; int64_t highbits = lvb->values.data()[i].getHighBits(); uint64_t lowbits = lvb->values.data()[i].getLowBits(); - uint64_t high = 0; - uint64_t low = 0; - if (highbits < 0) { - low = ~lowbits + 1; - high = static_cast<uint64_t>(~highbits); - if (low == 0) { - high += 1; + if (highbits < 0) { // int128's 2s' complement code + lowbits = ~lowbits + 1; // 2s' complement code + highbits = ~highbits; //1s' complement code + if (lowbits == 0) { + highbits += 1; // carry a number as in adding } - highbits = high | ((uint64_t)1 << 63); + highbits ^= ((uint64_t)1 << 63); } - Decimal128 d128(highbits, low); + Decimal128 d128(highbits, lowbits); originalVector->SetValue(i, d128); } else { originalVector->SetValueNull(i); } @@ -450,7 +447,7 @@ JNIEXPORT jlong JNICALL Java_com_huawei_boostkit_spark_jni_OrcColumnarBatchJniRe if (vcType != 
orc::TypeKind::DECIMAL) { copyToOminVec(maxLen, vcType, ominTypeId, ominVecId, root->fields[id]); } else { - copyToOminDecimalVec(baseTp.getSubtype(id)->getPrecision(), ominTypeId, ominVecId, + copyToOmniDecimalVec(baseTp.getSubtype(id)->getPrecision(), ominTypeId, ominVecId, root->fields[id]); } } catch (omniruntime::exception::OmniException &e) { @@ -550,4 +547,4 @@ JNIEXPORT jlong JNICALL Java_com_huawei_boostkit_spark_jni_OrcColumnarBatchJniRe rowReaderPtr->next(*columnVectorBatch); jlong rows = columnVectorBatch->numElements; return rows; -} \ No newline at end of file +} diff --git a/omnioperator/omniop-spark-extension/cpp/src/jni/OrcColumnarBatchJniReader.h b/omnioperator/omniop-spark-extension/cpp/src/jni/OrcColumnarBatchJniReader.h index 5d05f73471a014de14fe870ec602570b73110d84..d8a5b122226d1ac9a60b810ab0e93bd63f1f0d9d 100644 --- a/omnioperator/omniop-spark-extension/cpp/src/jni/OrcColumnarBatchJniReader.h +++ b/omnioperator/omniop-spark-extension/cpp/src/jni/OrcColumnarBatchJniReader.h @@ -142,6 +142,8 @@ int buildLeafs(int leafOp, std::vector<orc::Literal> &litList, orc::Literal &lit int copyToOminVec(int maxLen, int vcType, int &ominTypeId, uint64_t &ominVecId, orc::ColumnVectorBatch *field); +int copyToOmniDecimalVec(int precision, int &ominTypeId, uint64_t &ominVecId, orc::ColumnVectorBatch *field); + #ifdef __cplusplus } #endif diff --git a/omnioperator/omniop-spark-extension/java/src/main/scala/org/apache/spark/sql/execution/ColumnarFileSourceScanExec.scala b/omnioperator/omniop-spark-extension/java/src/main/scala/org/apache/spark/sql/execution/ColumnarFileSourceScanExec.scala index f1fb7f99a662e7fcda92e8836bff6dd5e5ffae69..845262a056060cea138bd2467256a92b7bafa523 100644 --- a/omnioperator/omniop-spark-extension/java/src/main/scala/org/apache/spark/sql/execution/ColumnarFileSourceScanExec.scala +++ b/omnioperator/omniop-spark-extension/java/src/main/scala/org/apache/spark/sql/execution/ColumnarFileSourceScanExec.scala @@ -373,13 +373,7 @@ abstract class 
BaseColumnarFileSourceScanExec( def buildCheck(): Unit = { output.zipWithIndex.foreach { case (attr, i) => - sparkTypeToOmniType(attr.dataType, attr.metadata) - if (attr.dataType.isInstanceOf[DecimalType]) { - val dt = attr.dataType.asInstanceOf[DecimalType] - if (!DecimalType.is64BitDecimalType(dt)) { - throw new UnsupportedOperationException(s"ColumnarTableScan is not supported for type:${dt}"); - } - } + sparkTypeToOmniType(attr.dataType, attr.metadata) } }