Run:

${SPARK_HOME}/bin/spark-shell \
  --conf "spark.jars=/myhome/jars/delta-spark_2.12-3.1.0.jar,/myhome/jars/delta-hive-assembly_2.12-3.1.0.jar" \
  --conf "spark.sql.extensions=io.delta.sql.DeltaSparkSessionExtension" \
  --conf "spark.sql.catalog.spark_catalog=org.apache.spark.sql.delta.catalog.DeltaCatalog"

Note that spark.jars takes a comma-separated list; passing --conf "spark.jars=..." twice makes the second value override the first, so both jars must be listed in a single setting.

new SparkSession.Builder()
  .appName(commandLineOptions.className())
  .config("spar..
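The same settings can also be supplied programmatically when the session is built rather than on the spark-shell command line. The sketch below is a minimal, hypothetical completion of the truncated fragment above: the app name literal stands in for commandLineOptions.className(), which is application-specific and not shown in this section.

```scala
import org.apache.spark.sql.SparkSession

// Minimal sketch: build a SparkSession with the same Delta Lake settings
// that the spark-shell invocation above passes via --conf.
val spark = SparkSession.builder()
  .appName("delta-hive-example")                        // placeholder for commandLineOptions.className()
  .config("spark.jars",
    "/myhome/jars/delta-spark_2.12-3.1.0.jar," +
    "/myhome/jars/delta-hive-assembly_2.12-3.1.0.jar")  // comma-separated jar list
  .config("spark.sql.extensions",
    "io.delta.sql.DeltaSparkSessionExtension")          // enable Delta SQL extensions
  .config("spark.sql.catalog.spark_catalog",
    "org.apache.spark.sql.delta.catalog.DeltaCatalog")  // route the session catalog through Delta
  .getOrCreate()
```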