From e985917f8711d34f07457df28fe336cde87a173b Mon Sep 17 00:00:00 2001 From: Ashutosh Dwivedi Date: Tue, 29 Jan 2019 21:36:58 +0530 Subject: [PATCH 1/4] Fix jar path and add missing ones for spark jobs Fix path of jars / add missing jars in spark job remove print, reduce clutter --- .../mxnet/spark/SharedSparkContext.scala | 24 ++++++++++--------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala index 2efd1814bc90..792b62ea6e3e 100644 --- a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala +++ b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala @@ -81,17 +81,15 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd } private def getJarFilePath(root: String): String = { - for (platform <- List("linux-x86_64-cpu", "linux-x86_64-gpu", "osx-x86_64-cpu")) { - val jarFiles = new File(s"$root/$platform/target/").listFiles(new FileFilter { - override def accept(pathname: File) = { - pathname.getAbsolutePath.endsWith(".jar") && - !pathname.getAbsolutePath.contains("javadoc") && - !pathname.getAbsolutePath.contains("sources") - } - }) - if (jarFiles != null && jarFiles.nonEmpty) { - return jarFiles.head.getAbsolutePath + val jarFiles = new File(s"$root/target/").listFiles(new FileFilter { + override def accept(pathname: File) = { + pathname.getAbsolutePath.endsWith(".jar") && + !pathname.getAbsolutePath.contains("bundle") && + !pathname.getAbsolutePath.contains("src") } + }) + if (jarFiles != null && jarFiles.nonEmpty) { + return jarFiles.head.getAbsolutePath } null } @@ -111,6 +109,8 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd } } + private def getNativeJars(root:String): String = new File(root).listFiles().map(_.toPath).mkString(",") + protected def 
buildLeNet(): MXNet = { val workingDir = composeWorkingDirPath val assemblyRoot = s"$workingDir/../assembly" @@ -130,6 +130,8 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd protected def buildMlp(): MXNet = { val workingDir = composeWorkingDirPath val assemblyRoot = s"$workingDir/../assembly" + val nativeRoot =s"$workingDir/../native/target/lib" + new MXNet() .setBatchSize(128) .setLabelName("softmax_label") @@ -139,7 +141,7 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd .setNumEpoch(10) .setNumServer(1) .setNumWorker(numWorkers) - .setExecutorJars(s"${getJarFilePath(assemblyRoot)},$getSparkJar") + .setExecutorJars(s"${getJarFilePath(assemblyRoot)},$getSparkJar,${getNativeJars(nativeRoot)}") .setJava("java") .setTimeout(0) } From 9adcedcb11399053937ae68ea72d4377723413e3 Mon Sep 17 00:00:00 2001 From: Ashutosh Dwivedi Date: Tue, 5 Feb 2019 12:45:40 +0530 Subject: [PATCH 2/4] fixes scalastyle violations --- .../org/apache/mxnet/spark/SharedSparkContext.scala | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala index 792b62ea6e3e..2e82a7a493b9 100644 --- a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala +++ b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala @@ -89,9 +89,10 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd } }) if (jarFiles != null && jarFiles.nonEmpty) { - return jarFiles.head.getAbsolutePath + jarFiles.head.getAbsolutePath + } else { + null } - null } private def getSparkJar: String = { @@ -109,7 +110,8 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd } } - private def getNativeJars(root:String): String = new 
File(root).listFiles().map(_.toPath).mkString(",") + private def getNativeJars(root: String): String = + new File(root).listFiles().map(_.toPath).mkString(",") protected def buildLeNet(): MXNet = { val workingDir = composeWorkingDirPath @@ -130,7 +132,7 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd protected def buildMlp(): MXNet = { val workingDir = composeWorkingDirPath val assemblyRoot = s"$workingDir/../assembly" - val nativeRoot =s"$workingDir/../native/target/lib" + val nativeRoot = s"$workingDir/../native/target/lib" new MXNet() .setBatchSize(128) From 6f4b56fbd0cc6dedf3195d3cfd1a2c82b3e00865 Mon Sep 17 00:00:00 2001 From: Ashutosh Dwivedi Date: Fri, 8 Feb 2019 00:22:06 +0530 Subject: [PATCH 3/4] exclude all of javadoc, sources, bundle, and src while searching for jars --- .../mxnet/spark/SharedSparkContext.scala | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala index 2e82a7a493b9..4bc70848fb24 100644 --- a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala +++ b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala @@ -80,14 +80,18 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd System.getProperty("user.dir") } - private def getJarFilePath(root: String): String = { - val jarFiles = new File(s"$root/target/").listFiles(new FileFilter { + private def findJars(root: String): Array[File] = { + val excludedSuffixes = List("bundle", "src", "javadoc", "sources") + new File(root).listFiles(new FileFilter { override def accept(pathname: File) = { pathname.getAbsolutePath.endsWith(".jar") && - !pathname.getAbsolutePath.contains("bundle") && - !pathname.getAbsolutePath.contains("src") + 
excludedSuffixes.map(!pathname.getAbsolutePath.contains(_)).forall(identity) } }) + } + + private def getJarFilePath(root: String): String = { + val jarFiles = findJars(s"$root/target/") if (jarFiles != null && jarFiles.nonEmpty) { jarFiles.head.getAbsolutePath } else { @@ -96,13 +100,7 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd } private def getSparkJar: String = { - val jarFiles = new File(s"$composeWorkingDirPath/target/").listFiles(new FileFilter { - override def accept(pathname: File) = { - pathname.getAbsolutePath.endsWith(".jar") && - !pathname.getAbsolutePath.contains("javadoc") && - !pathname.getAbsolutePath.contains("sources") - } - }) + val jarFiles = findJars(s"$composeWorkingDirPath/target/") if (jarFiles != null && jarFiles.nonEmpty) { jarFiles.head.getAbsolutePath } else { From e70371d1231c8c3e84c72f86b66d96a14acdad6a Mon Sep 17 00:00:00 2001 From: Ashutosh Dwivedi Date: Fri, 8 Feb 2019 00:38:28 +0530 Subject: [PATCH 4/4] simplified the exclude expression --- .../test/scala/org/apache/mxnet/spark/SharedSparkContext.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala index 4bc70848fb24..6d36ca51db90 100644 --- a/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala +++ b/scala-package/spark/src/test/scala/org/apache/mxnet/spark/SharedSparkContext.scala @@ -85,7 +85,7 @@ trait SharedSparkContext extends FunSuite with BeforeAndAfterEach with BeforeAnd new File(root).listFiles(new FileFilter { override def accept(pathname: File) = { pathname.getAbsolutePath.endsWith(".jar") && - excludedSuffixes.map(!pathname.getAbsolutePath.contains(_)).forall(identity) + excludedSuffixes.forall(!pathname.getAbsolutePath.contains(_)) } }) }