user
05/22/2022, 8:56 PMuser
05/22/2022, 9:05 PMuser
05/23/2022, 6:09 AMuser
05/23/2022, 6:11 AMuser
05/23/2022, 6:12 AMuser
05/23/2022, 6:13 AMuser
05/23/2022, 6:19 AMuser
05/23/2022, 6:30 AMuser
05/23/2022, 6:32 AMuser
05/23/2022, 6:41 AMuser
05/23/2022, 6:42 AMuser
05/23/2022, 6:42 AMval conf = Engine.createSparkConf()
.setAppName("Train Lenet on MNIST")
.set("spark.task.maxFailures", "1")
val sc = new SparkContext(conf)
Engine.init
Do you mind trying the following:
val conf = Engine.createSparkConf()
.setAppName("Train Lenet on MNIST")
.set("spark.task.maxFailures", "1")
val sc = new SparkContext(conf)
sc.hadoopConfiguration.set("fs.s3a.endpoint", <your lakeFS server endpoint>)
sc.hadoopConfiguration.set("fs.s3a.access.key", <your lakeFS access key>)
sc.hadoopConfiguration.set("fs.s3a.secret.key", <your lakeFS secret key>)
sc.hadoopConfiguration.set("fs.s3a.path.style.access", "true")
Engine.init
?user
05/23/2022, 6:51 AMuser
05/23/2022, 7:18 AMuser
05/23/2022, 7:32 AMuser
05/23/2022, 7:49 AMuser
05/23/2022, 10:47 AM