Sample stacktrace for a kotlin-spark encoding failure (full trace below).
# kotlin-spark
Sample stacktrace:
Exception in thread "main" java.lang.RuntimeException: Error while encoding: java.util.concurrent.ExecutionException: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 90, Column 24: failed to compile: org.codehaus.commons.compiler.CompileException: File 'generated.java', Line 90, Column 24: No applicable constructor/method found for actual parameters "int, java.lang.Object"; candidates are: "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter.write(int, long)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter.write(int, int)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter.write(int, short)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter.write(int, boolean)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter.write(int, byte)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter.write(int, org.apache.spark.sql.types.Decimal, int, int)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter.write(int, double)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeArrayWriter.write(int, float)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, byte[])", "public abstract void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, long)", "public abstract void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, org.apache.spark.sql.types.Decimal, int, int)", "public abstract void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, double)", "public abstract void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, float)", "public abstract void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, int)", "public void 
org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(org.apache.spark.sql.catalyst.expressions.UnsafeArrayData)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, org.apache.spark.sql.catalyst.expressions.UnsafeMapData)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, org.apache.spark.sql.catalyst.expressions.UnsafeRow)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, org.apache.spark.unsafe.types.CalendarInterval)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, byte[], int, int)", "public abstract void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, boolean)", "public void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, org.apache.spark.unsafe.types.UTF8String)", "public abstract void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, short)", "public abstract void org.apache.spark.sql.catalyst.expressions.codegen.UnsafeWriter.write(int, byte)"
newInstance(class org.apache.spark.sql.catalyst.util.GenericArrayData) AS value#14
	at org.apache.spark.sql.catalyst.encoders.ExpressionEncoder$Serializer.apply(ExpressionEncoder.scala:215)
	at org.apache.spark.sql.SparkSession.$anonfun$createDataset$1(SparkSession.scala:466)
	at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:238)
	at scala.collection.Iterator.foreach(Iterator.scala:941)
	at scala.collection.Iterator.foreach$(Iterator.scala:941)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
	at scala.collection.IterableLike.foreach(IterableLike.scala:74)
	at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
	at scala.collection.AbstractIterable.foreach(Iterable.scala:56)
	at scala.collection.TraversableLike.map(TraversableLike.scala:238)