digraph G {
  subgraph cluster0 {
    isCluster="true";
    label="WholeStageCodegen (1)";
    1 [labelType="html" label="<br><b>Project</b><br><br>"];
    2 [labelType="html" label="<br><b>SerializeFromObject</b><br><br>"];
    3 [labelType="html" label="<br><b>MapElements</b><br><br>"];
    4 [labelType="html" label="<br><b>DeserializeToObject</b><br><br>"];
    5 [labelType="html" label="<br><b>Project</b><br><br>"];
    6 [labelType="html" label="<br><b>Filter</b><br><br>"];
    7 [labelType="html" label="<br><b>Scan ExistingRDD</b><br><br>"];
  }
  2->1;
  3->2;
  4->3;
  5->4;
  6->5;
  7->6;
}
Project [path#1926, partitionValues#1927, size#1928L, modificationTime#1929L, dataChange#1930, null AS stats#1949, tags#1932, deletionVector#1933, baseRowId#1934L, defaultRowCommitVersion#1935L, clusteringProvider#1936]
SerializeFromObject [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).path, true, false, true) AS path#1926, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).partitionValues) AS partitionValues#1927, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).size AS size#1928L, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).modificationTime AS modificationTime#1929L, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).dataChange AS dataChange#1930, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).tags) AS tags#1932, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).maxRowIndex)) AS 
deletionVector#1933, unwrapoption(LongType, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).baseRowId) AS baseRowId#1934L, unwrapoption(LongType, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).defaultRowCommitVersion) AS defaultRowCommitVersion#1935L, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).clusteringProvider), true, false, true) AS clusteringProvider#1936]
MapElements org.apache.spark.sql.Dataset$$Lambda$5811/0x00007f301da515e0@2e0988b3, obj#1925: org.apache.spark.sql.delta.actions.AddFile
DeserializeToObject newInstance(class scala.Tuple1), obj#1924: scala.Tuple1
Project [add#1860]
Filter isnotnull(add#1860)
Scan ExistingRDD[txn#1859,add#1860,remove#1861,metaData#1862,protocol#1863,cdc#1864,checkpointMetadata#1865,sidecar#1866,domainMetadata#1867,commitInfo#1868]
WholeStageCodegen (1)
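The DOT source and the per-operator strings above are the plan graph and node details as shown on the Spark UI's SQL tab; all seven operators run inside a single WholeStageCodegen stage. The formatted plan that follows is the kind of output produced by a formatted explain on the originating Dataset. A minimal sketch, assuming access to that Dataset (`addFilesDs` is a placeholder name, not part of the original dump):

```scala
// Sketch: `addFilesDs` stands in for the Dataset behind the plan above (assumption).
// explain("formatted") (Spark 3.0+) prints the operator tree followed by the
// numbered per-operator details, matching the layout reproduced below.
addFilesDs.explain("formatted")
```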
== Physical Plan ==
* Project (7)
+- * SerializeFromObject (6)
   +- * MapElements (5)
      +- * DeserializeToObject (4)
         +- * Project (3)
            +- * Filter (2)
               +- * Scan ExistingRDD (1)
(1) Scan ExistingRDD [codegen id : 1]
Output [10]: [txn#1859, add#1860, remove#1861, metaData#1862, protocol#1863, cdc#1864, checkpointMetadata#1865, sidecar#1866, domainMetadata#1867, commitInfo#1868]
Arguments: [txn#1859, add#1860, remove#1861, metaData#1862, protocol#1863, cdc#1864, checkpointMetadata#1865, sidecar#1866, domainMetadata#1867, commitInfo#1868], MapPartitionsRDD[116] at $anonfun$recordDeltaOperationInternal$1 at DatabricksLogging.scala:128, ExistingRDD, UnknownPartitioning(0)
(2) Filter [codegen id : 1]
Input [10]: [txn#1859, add#1860, remove#1861, metaData#1862, protocol#1863, cdc#1864, checkpointMetadata#1865, sidecar#1866, domainMetadata#1867, commitInfo#1868]
Condition : isnotnull(add#1860)
(3) Project [codegen id : 1]
Output [1]: [add#1860]
Input [10]: [txn#1859, add#1860, remove#1861, metaData#1862, protocol#1863, cdc#1864, checkpointMetadata#1865, sidecar#1866, domainMetadata#1867, commitInfo#1868]
(4) DeserializeToObject [codegen id : 1]
Input [1]: [add#1860]
Arguments: newInstance(class scala.Tuple1), obj#1924: scala.Tuple1
(5) MapElements [codegen id : 1]
Input [1]: [obj#1924]
Arguments: org.apache.spark.sql.Dataset$$Lambda$5811/0x00007f301da515e0@2e0988b3, obj#1925: org.apache.spark.sql.delta.actions.AddFile
(6) SerializeFromObject [codegen id : 1]
Input [1]: [obj#1925]
Arguments: [staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).path, true, false, true) AS path#1926, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -1), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -2), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).partitionValues) AS partitionValues#1927, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).size AS size#1928L, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).modificationTime AS modificationTime#1929L, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).dataChange AS dataChange#1930, externalmaptocatalyst(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_key, ObjectType(class java.lang.Object), true, -3), StringType, ObjectType(class java.lang.String)), true, false, true), lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, validateexternaltype(lambdavariable(ExternalMapToCatalyst_value, ObjectType(class java.lang.Object), true, -4), StringType, ObjectType(class java.lang.String)), true, false, true), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).tags) AS tags#1932, if (isnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector)) null else named_struct(storageType, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).storageType, true, false, true), pathOrInlineDv, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).pathOrInlineDv, true, false, true), offset, unwrapoption(IntegerType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).offset), sizeInBytes, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).sizeInBytes, cardinality, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).cardinality, maxRowIndex, unwrapoption(LongType, knownnotnull(knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).deletionVector).maxRowIndex)) AS 
deletionVector#1933, unwrapoption(LongType, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).baseRowId) AS baseRowId#1934L, unwrapoption(LongType, knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).defaultRowCommitVersion) AS defaultRowCommitVersion#1935L, staticinvoke(class org.apache.spark.unsafe.types.UTF8String, StringType, fromString, unwrapoption(ObjectType(class java.lang.String), knownnotnull(assertnotnull(input[0, org.apache.spark.sql.delta.actions.AddFile, true])).clusteringProvider), true, false, true) AS clusteringProvider#1936]
(7) Project [codegen id : 1]
Output [11]: [path#1926, partitionValues#1927, size#1928L, modificationTime#1929L, dataChange#1930, null AS stats#1949, tags#1932, deletionVector#1933, baseRowId#1934L, defaultRowCommitVersion#1935L, clusteringProvider#1936]
Input [10]: [path#1926, partitionValues#1927, size#1928L, modificationTime#1929L, dataChange#1930, tags#1932, deletionVector#1933, baseRowId#1934L, defaultRowCommitVersion#1935L, clusteringProvider#1936]
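The operator chain (scan of the Delta state RDD, filter on a non-null `add` action, projection down to `add`, object deserialization, a typed map to `AddFile`, re-serialization, and a final projection that emits `null AS stats`) is the shape produced by a typed Dataset pipeline over the Delta log state. Below is a minimal sketch of such a pipeline; `AddFileLike` and `addFilesWithoutStats` are illustrative names, the case class carries only a subset of the fields visible in the plan, and this is not Delta's actual implementation.

```scala
import org.apache.spark.sql.{DataFrame, SparkSession}
import org.apache.spark.sql.functions.{col, lit}

// Illustrative stand-in for org.apache.spark.sql.delta.actions.AddFile; the real
// class also carries deletionVector, baseRowId, defaultRowCommitVersion, etc.
case class AddFileLike(
    path: String,
    partitionValues: Map[String, String],
    size: Long,
    modificationTime: Long,
    dataChange: Boolean,
    stats: String,
    tags: Map[String, String])

// Sketch of a pipeline whose physical plan has the same shape as above:
// Scan ExistingRDD -> Filter isnotnull(add) -> Project [add] ->
// DeserializeToObject / MapElements / SerializeFromObject -> Project (null AS stats).
def addFilesWithoutStats(spark: SparkSession, state: DataFrame): DataFrame = {
  import spark.implicits._
  state
    .where(col("add").isNotNull)                   // Filter: isnotnull(add)
    .select(col("add").as[AddFileLike])            // Project down to the `add` struct
    .map(a => a)                                   // typed map: Deserialize/MapElements/Serialize
    .withColumn("stats", lit(null).cast("string")) // final Project replacing stats with null
}
```

The typed `select(...).as[AddFileLike]` plus `map` is what forces the deserialize/serialize pair around `MapElements`; replacing `stats` with a null literal afterwards is what surfaces as `null AS stats#1949` in the outer Project.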