From a02446cf1c13fb6b817775c595cd7e46e73e6647 Mon Sep 17 00:00:00 2001 From: Scala Steward <43047562+scala-steward@users.noreply.github.com> Date: Wed, 6 Mar 2024 16:41:25 +0100 Subject: [PATCH 1/8] Doc tooling: Update fansi from 0.2.3 to 0.2.14 (#1400) Pull request: https://github.com/com-lihaoyi/Ammonite/pull/1400/ --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index b457f0019..cc5646294 100644 --- a/build.sbt +++ b/build.sbt @@ -7,7 +7,7 @@ lazy val readme = ScalatexReadme( source = "Index" ).settings( scalaVersion := "2.12.18", - libraryDependencies += "com.lihaoyi" %% "fansi" % "0.2.3", + libraryDependencies += "com.lihaoyi" %% "fansi" % "0.2.14", libraryDependencies += "com.lihaoyi" %% "os-lib" % "0.7.8", Test / envVars := Map( "AMMONITE_ASSEMBLY" -> sys.env("AMMONITE_ASSEMBLY"), From 7b0f3bf07f76a8a6b44eb00abef439532c018297 Mon Sep 17 00:00:00 2001 From: Tobias Roeser Date: Wed, 6 Mar 2024 17:00:19 +0100 Subject: [PATCH 2/8] CI: Increase shard count in release process (#1447) This hopefully lets the publishing process fail less often. Pull request: https://github.com/com-lihaoyi/Ammonite/pull/1447 --- .github/workflows/release.yml | 4 ++-- build.sc | 21 ++++++++++++--------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 40036958d..28606a59a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -19,7 +19,7 @@ jobs: release: strategy: matrix: - shard: [ 1, 2, 3, 4, 5 ] + shard: [ 1, 2, 3, 4, 5, 6] fail-fast: false runs-on: ubuntu-latest env: @@ -39,4 +39,4 @@ jobs: java-version: '8' distribution: temurin - run: test -z "$SONATYPE_PGP_SECRET" || echo "$SONATYPE_PGP_SECRET" | base64 --decode | gpg --import --no-tty --batch --yes - - run: GPG_TTY=$(tty) ./mill -i publishSonatype __.publishArtifacts --shard ${{ matrix.shard }} --divisionCount 5 + - run: GPG_TTY=$(tty) ./mill -i publishSonatype __.publishArtifacts --shard ${{ matrix.shard }} --divisionCount 6 diff --git a/build.sc b/build.sc index f4d6807e0..5c418778d 100644 --- a/build.sc +++ b/build.sc @@ -956,15 +956,18 @@ def publishSonatype(publishArtifacts: mill.main.Tasks[PublishModule.PublishData] } if (isPublishableCommit) new SonatypePublisher( - "https://oss.sonatype.org/service/local", - "https://oss.sonatype.org/content/repositories/snapshots", - sys.env("SONATYPE_DEPLOY_USER") + ":" + sys.env("SONATYPE_DEPLOY_PASSWORD"), - true, - Seq("--passphrase", sys.env("SONATYPE_PGP_PASSWORD"), "--no-tty", "--pinentry-mode", "loopback", "--batch", "--yes", "-a", "-b"), - 600000, - 600000, - T.ctx().log, - 600000, + uri = "https://oss.sonatype.org/service/local", + snapshotUri = "https://oss.sonatype.org/content/repositories/snapshots", + credentials = sys.env("SONATYPE_DEPLOY_USER") + ":" + sys.env("SONATYPE_DEPLOY_PASSWORD"), + signed = true, + gpgArgs = Seq("--passphrase", sys.env("SONATYPE_PGP_PASSWORD"), "--no-tty", "--pinentry-mode", "loopback", "--batch", "--yes", "-a", "-b"), + readTimeout = 600000, + connectTimeout = 600000, + log = T.ctx().log, + workspace = T.workspace, + env = T.env, + awaitTimeout = 600000, + stagingRelease = true ).publishAll( true, x:_* From 78d98a149f2a4d7ad81f398edc72475a4fe8c5bf Mon Sep 17 00:00:00 2001 From: Tobias Roeser Date: Wed, 6 Mar 2024 17:06:33 +0100 Subject: [PATCH 3/8] Formatted scala code --- .../ammonite/compiler/iface/CodeWrapper.scala | 34 +- .../ammonite/compiler/iface/Compiler.scala | 17 +-
.../compiler/iface/CompilerBuilder.scala | 38 +- .../iface/CompilerLifecycleManager.scala | 12 +- .../ammonite/compiler/iface/Parser.scala | 68 +-- .../compiler/iface/Preprocessor.scala | 62 +-- .../compiler/CompilerCompatibility.scala | 20 +- .../scala/tools/nsc/AmmClassPath.scala | 2 - .../scala/tools/nsc/WhiteListClasspath.scala | 11 +- .../compiler/CompilerCompatibility.scala | 18 +- .../ammonite/compiler/MakeReporter.scala | 20 +- .../ammonite/compiler/MakeReporter.scala | 20 +- .../ammonite/compiler/MakeReporter.scala | 23 +- .../compiler/CompilerCompatibility.scala | 21 +- .../ammonite/compiler/AmmonitePlugin.scala | 255 +++++----- .../scala-2/ammonite/compiler/Compiler.scala | 129 +++--- .../ammonite/compiler/CompilerBuilder.scala | 38 +- .../compiler/CompilerExtensions.scala | 27 +- .../compiler/CompilerLifecycleManager.scala | 74 +-- .../compiler/DefaultPreprocessor.scala | 98 ++-- .../ammonite/compiler/Highlighter.scala | 130 +++--- .../scala-2/ammonite/compiler/Parsers.scala | 196 ++++---- .../scala-2/ammonite/compiler/Pressy.scala | 125 ++--- .../internal/CustomURLZipArchive.scala | 54 ++- .../compiler/tools/HighlightJava.scala | 28 +- .../compiler/tools/SourceRuntime.scala | 216 +++++---- .../ammonite/compiler/tools/desugar.scala | 7 +- .../ammonite/compiler/tools/source.scala | 74 +-- .../CustomZipAndJarFileLookupFactory.scala | 5 +- .../compiler/AsmPositionUpdater.scala | 30 +- .../compiler/CompilerExtensions.scala | 23 +- .../compiler/CompilerLifecycleManager.scala | 77 ++-- .../compiler/internal/CompilerHelper.scala | 4 +- .../ammonite/compiler/tools/source.scala | 2 +- .../compiler/DirectoryClassPath.scala | 3 +- .../compiler/WhiteListClassPath.scala | 11 +- .../scala/tools/nsc/AmmClassPath.scala | 4 +- .../scala/tools/nsc/WhiteListClasspath.scala | 11 +- .../ammonite/compiler/MakeReporter.scala | 16 +- .../ammonite/compiler/CodeClassWrapper.scala | 22 +- .../compiler/DefaultCodeWrapper.scala | 20 +- .../api/src/main/scala/ammonite/Stubs.scala | 42 +- .../ammonite/interp/api/AmmoniteExit.scala | 4 +- .../scala/ammonite/interp/api/InterpAPI.scala | 49 +- .../ammonite/interp/api/IvyConstructor.scala | 6 +- .../ammonite/interp/DependencyLoader.scala | 21 +- .../scala/ammonite/interp/Interpreter.scala | 435 +++++++++--------- .../main/scala/ammonite/interp/IvyThing.scala | 24 +- .../interp/PredefInitialization.scala | 48 +- .../scala/ammonite/interp/Watchable.scala | 32 +- .../interp/script/AmmoniteBuildServer.scala | 91 ++-- .../ammonite/interp/script/Diagnostic.scala | 8 +- .../script/DummyBuildServerImplems.scala | 10 +- .../scala/ammonite/interp/script/Script.scala | 44 +- .../ammonite/interp/script/ScriptCache.scala | 6 +- .../interp/script/ScriptCompileResult.scala | 4 +- .../interp/script/ScriptCompiler.scala | 68 +-- .../interp/script/ScriptProcessor.scala | 27 +- .../interp/script/SemanticdbProcessor.scala | 23 +- .../interp/script/SingleScriptCompiler.scala | 40 +- .../ammonite/repl/api/History.scala | 12 +- .../ammonite/repl/api/History.scala | 6 +- .../api/ReplAPIScalaVersionSpecific.scala | 1 - .../scala/ammonite/repl/FullReplAPI.scala | 35 +- .../scala/ammonite/repl/api/FrontEnd.scala | 18 +- .../scala/ammonite/repl/api/FrontEndAPI.scala | 2 - .../scala/ammonite/repl/api/ReplAPI.scala | 159 ++++--- .../main/scala/ammonite/repl/tools/Util.scala | 4 +- .../scala/ammonite/runtime/tools/Tools.scala | 106 ++--- .../ammonite/runtime/tools/package.scala | 6 +- .../main/scala/ammonite/main/Defaults.scala | 20 +- .../ammonite/repl/AmmoniteFrontEnd.scala | 68 +-- 
.../main/scala/ammonite/repl/ApiImpls.scala | 20 +- .../scala/ammonite/repl/FrontEndUtils.scala | 18 +- .../main/scala/ammonite/repl/FrontEnds.scala | 50 +- .../main/scala/ammonite/repl/PPrints.scala | 8 +- .../src/main/scala/ammonite/repl/Repl.scala | 117 ++--- .../main/scala/ammonite/repl/Signaller.scala | 12 +- .../main/scala/ammonite/repl/package.scala | 6 +- .../ammonite/unit/SourceTests212.scala | 46 +- .../test/scala/ammonite/DualTestRepl.scala | 7 +- .../scala/ammonite/SerializationUtil.scala | 22 +- .../src/test/scala/ammonite/TestRepl.scala | 128 +++--- .../src/test/scala/ammonite/TestUtils.scala | 15 +- .../ammonite/interp/AutocompleteTests.scala | 142 +++--- .../scala/ammonite/interp/PrintTests.scala | 10 +- .../ammonite/session/AdvancedTests.scala | 98 ++-- .../scala/ammonite/session/BuiltinTests.scala | 38 +- .../scala/ammonite/session/EulerTests.scala | 76 ++- .../ammonite/session/EvaluatorTests.scala | 36 +- .../scala/ammonite/session/FailureTests.scala | 56 +-- .../ammonite/session/ImportHookTests.scala | 31 +- .../scala/ammonite/session/ImportTests.scala | 36 +- .../scala/ammonite/session/ProjectTests.scala | 86 ++-- .../ammonite/session/SerializationTests.scala | 9 +- .../ammonite/testcode/PaulpImports.scala | 6 +- .../scala/ammonite/unit/ClipboardTests.scala | 16 +- .../scala/ammonite/unit/HighlightTests.scala | 34 +- .../scala/ammonite/unit/ParserTests.scala | 32 +- .../scala/ammonite/unit/SourceTests.scala | 31 +- .../scala/ammonite/runtime/ClassLoaders.scala | 185 ++++---- .../scala/ammonite/runtime/Evaluator.scala | 130 +++--- .../scala/ammonite/runtime/ImportHook.scala | 220 +++++---- .../main/scala/ammonite/runtime/Storage.scala | 176 +++---- .../main/scala/ammonite/runtime/package.scala | 8 +- .../main/scala/ammonite/AmmoniteMain.scala | 36 +- amm/src/main/scala/ammonite/Main.scala | 193 ++++---- amm/src/main/scala/ammonite/MainRunner.scala | 66 +-- amm/src/main/scala/ammonite/main/Config.scala | 183 ++++---- .../scala/ammonite/main/ProxyFromEnv.scala | 47 +- .../main/scala/ammonite/main/Scripts.scala | 54 ++- .../main/TrapExitSecurityManager.scala | 2 +- .../main/scala/ammonite/main/package.scala | 8 +- .../scala/ammonite/interp/CachingTests.scala | 117 +++-- .../interp/CompilerSettingsTests.scala | 8 +- .../ammonite/interp/YRangeposTests.scala | 10 +- .../script/AmmoniteBuildServerTests.scala | 9 +- .../interp/script/TestBuildClient.scala | 4 +- .../main/InProcessMainMethodRunner.scala | 41 +- .../InProcessMainMethodRunnerRawArgs.scala | 24 +- .../scala/ammonite/main/LineNumberTests.scala | 109 ++--- .../test/scala/ammonite/main/MainTests.scala | 93 ++-- .../scala/ammonite/session/ScriptTests.scala | 170 ++++--- .../main/scala/ammonite/util/Classpath.scala | 22 +- .../src/main/scala/ammonite/util/Frame.scala | 2 + .../main/scala/ammonite/util/Imports.scala | 128 +++--- .../src/main/scala/ammonite/util/Model.scala | 253 ++++++---- .../util/PositionOffsetConversion.scala | 15 +- .../src/main/scala/ammonite/util/Res.scala | 97 ++-- .../src/main/scala/ammonite/util/Util.scala | 73 ++- .../ammonite/util/WhiteListClassLoader.scala | 29 +- build.sc | 384 +++++++++------- ci/upload.sc | 20 +- .../integration/ProjectTests213.scala | 11 +- .../integration/ErrorTruncationTests.scala | 22 +- .../integration/LineNumberTests.scala | 15 +- .../ammonite/integration/ProjectTests.scala | 14 +- .../ammonite/integration/TestUtils.scala | 35 +- .../scala/ammonite/sshd/ShellSession.scala | 4 +- .../main/scala/ammonite/sshd/SshServer.scala | 4 +- 
.../scala/ammonite/sshd/SshServerConfig.scala | 20 +- .../main/scala/ammonite/sshd/SshdRepl.scala | 37 +- .../ammonite/sshd/util/Environment.scala | 12 +- sshd/src/test/scala/ammonite/sshd/Main.scala | 15 +- .../ammonite/sshd/ScalaCheckSupport.scala | 8 +- .../scala/ammonite/sshd/SshServerTests.scala | 22 +- .../scala/ammonite/sshd/SshTestingUtils.scala | 18 +- .../main/scala/ammonite/terminal/Filter.scala | 108 +++-- .../scala/ammonite/terminal/FilterTools.scala | 7 +- .../scala/ammonite/terminal/LineReader.scala | 153 +++--- .../scala/ammonite/terminal/Protocol.scala | 11 +- .../scala/ammonite/terminal/SpecialKeys.scala | 96 ++-- .../scala/ammonite/terminal/Terminal.scala | 18 +- .../main/scala/ammonite/terminal/Utils.scala | 66 +-- .../terminal/filters/BasicFilters.scala | 59 ++- .../terminal/filters/GUILikeFilters.scala | 94 ++-- .../terminal/filters/HistoryFilter.scala | 231 +++++----- .../terminal/filters/ReadlineFilters.scala | 13 +- .../terminal/filters/UndoFilter.scala | 83 ++-- .../scala/ammonite/terminal/Checker.scala | 14 +- .../scala/ammonite/terminal/EditTests.scala | 25 +- .../scala/ammonite/terminal/HeightTests.scala | 60 ++- .../ammonite/terminal/HistoryTests.scala | 41 +- .../ammonite/terminal/NavigationTests.scala | 91 ++-- .../scala/ammonite/terminal/TestMain.scala | 22 +- 165 files changed, 4615 insertions(+), 4380 deletions(-) diff --git a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CodeWrapper.scala b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CodeWrapper.scala index c6ae524d3..86353f570 100644 --- a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CodeWrapper.scala +++ b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CodeWrapper.scala @@ -6,29 +6,31 @@ import ammonite.util.Util.CodeSource abstract class CodeWrapper { def wrapperPath: Seq[Name] = Nil def apply( - code: String, - source: CodeSource, - imports: Imports, - printCode: String, - indexedWrapper: Name, - extraCode: String + code: String, + source: CodeSource, + imports: Imports, + printCode: String, + indexedWrapper: Name, + extraCode: String ): (String, String, Int) - def wrapCode(codeSource: CodeSource, - indexedWrapperName: Name, - code: String, - printCode: String, - imports: Imports, - extraCode: String, - markScript: Boolean) = { + def wrapCode( + codeSource: CodeSource, + indexedWrapperName: Name, + code: String, + printCode: String, + imports: Imports, + extraCode: String, + markScript: Boolean + ) = { - //we need to normalize topWrapper and bottomWrapper in order to ensure - //the snippets always use the platform-specific newLine + // we need to normalize topWrapper and bottomWrapper in order to ensure + // the snippets always use the platform-specific newLine val extraCode0 = if (markScript) extraCode + "/**/" else extraCode val (topWrapper, bottomWrapper, userCodeNestingLevel) = - apply(code, codeSource, imports, printCode, indexedWrapperName, extraCode0) + apply(code, codeSource, imports, printCode, indexedWrapperName, extraCode0) val (topWrapper0, bottomWrapper0) = if (markScript) (topWrapper + "/**/ /**/" + bottomWrapper) else (topWrapper, bottomWrapper) diff --git a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Compiler.scala b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Compiler.scala index d2207255d..3fa415741 100644 --- a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Compiler.scala +++ b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Compiler.scala @@ -5,11 
+5,11 @@ import ammonite.util.{Imports, Printer} abstract class Compiler { def compile( - src: Array[Byte], - printer: Printer, - importsLen: Int, - userCodeNestingLevel: Int, - fileName: String + src: Array[Byte], + printer: Printer, + importsLen: Int, + userCodeNestingLevel: Int, + fileName: String ): Option[Compiler.Output] def preprocessor(fileName: String, markGeneratedSections: Boolean = false): Preprocessor @@ -19,10 +19,9 @@ abstract class Compiler { object Compiler { case class Output( - classFiles: Vector[(String, Array[Byte])], - imports: Imports, - usedEarlierDefinitions: Option[Seq[String]] + classFiles: Vector[(String, Array[Byte])], + imports: Imports, + usedEarlierDefinitions: Option[Seq[String]] ) } - diff --git a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CompilerBuilder.scala b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CompilerBuilder.scala index de682f4f6..7ab1bf34a 100644 --- a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CompilerBuilder.scala +++ b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CompilerBuilder.scala @@ -8,24 +8,24 @@ import ammonite.util.Frame abstract class CompilerBuilder { def newManager( - rtCacheDir: Option[Path], - headFrame: => Frame, - dependencyCompleter: => Option[String => (Int, Seq[String])], - whiteList: Set[Seq[String]], - initialClassLoader: ClassLoader, - settings: Seq[String] + rtCacheDir: Option[Path], + headFrame: => Frame, + dependencyCompleter: => Option[String => (Int, Seq[String])], + whiteList: Set[Seq[String]], + initialClassLoader: ClassLoader, + settings: Seq[String] ): CompilerLifecycleManager def create( - initialClassPath: Seq[URL], - classPath: Seq[URL], - dynamicClassPath: Seq[(String, Array[Byte])], - evalClassLoader: ClassLoader, - pluginClassLoader: ClassLoader, - reporter: Option[CompilerBuilder.Message => Unit], - settings: Seq[String], - classPathWhiteList: Set[Seq[String]], - lineNumberModifier: Boolean + initialClassPath: Seq[URL], + classPath: Seq[URL], + dynamicClassPath: Seq[(String, Array[Byte])], + evalClassLoader: ClassLoader, + pluginClassLoader: ClassLoader, + reporter: Option[CompilerBuilder.Message => Unit], + settings: Seq[String], + classPathWhiteList: Set[Seq[String]], + lineNumberModifier: Boolean ): Compiler def scalaVersion: String @@ -34,10 +34,10 @@ abstract class CompilerBuilder { object CompilerBuilder { case class Message( - severity: String, - start: Int, - end: Int, - message: String + severity: String, + start: Int, + end: Int, + message: String ) } diff --git a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CompilerLifecycleManager.scala b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CompilerLifecycleManager.scala index 5ef86ce6f..4714b84a6 100644 --- a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CompilerLifecycleManager.scala +++ b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/CompilerLifecycleManager.scala @@ -18,15 +18,15 @@ abstract class CompilerLifecycleManager { def init(force: Boolean = false): Unit def complete( - offset: Int, - previousImports: String, - snippet: String + offset: Int, + previousImports: String, + snippet: String ): (Int, Seq[String], Seq[String]) def compileClass( - processed: Preprocessor.Output, - printer: Printer, - fileName: String + processed: Preprocessor.Output, + printer: Printer, + fileName: String ): Option[Compiler.Output] def addToClasspath(classFiles: ClassFiles): Unit diff --git 
a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Parser.scala b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Parser.scala index a90148ff1..92311bdc8 100644 --- a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Parser.scala +++ b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Parser.scala @@ -6,44 +6,46 @@ import ammonite.util.Util.CodeSource abstract class Parser { def split( - code: String, - ignoreIncomplete: Boolean = true, - fileName: String = "(console)" + code: String, + ignoreIncomplete: Boolean = true, + fileName: String = "(console)" ): Option[Either[String, Seq[String]]] final def parseImportHooks( - source: CodeSource, - stmts: Seq[String] + source: CodeSource, + stmts: Seq[String] ): (Seq[String], Seq[ImportTree]) = parseImportHooksWithIndices(source, stmts.map((0, _))) def parseImportHooksWithIndices( - source: CodeSource, - stmts: Seq[(Int, String)] + source: CodeSource, + stmts: Seq[(Int, String)] ): (Seq[String], Seq[ImportTree]) /** - * Splits up a script file into its constituent blocks, each of which - * is a tuple of (leading-whitespace, statements). Leading whitespace - * is returned separately so we can later manipulate the statements e.g. - * by adding `val res2 = ` without the whitespace getting in the way - */ + * Splits up a script file into its constituent blocks, each of which + * is a tuple of (leading-whitespace, statements). Leading whitespace + * is returned separately so we can later manipulate the statements e.g. + * by adding `val res2 = ` without the whitespace getting in the way + */ def splitScript( - rawCode: String, - fileName: String + rawCode: String, + fileName: String ): Either[String, IndexedSeq[(String, Seq[String])]] def scriptBlocksWithStartIndices( - rawCode: String, - fileName: String + rawCode: String, + fileName: String ): Either[Parser.ScriptSplittingError, Seq[Parser.ScriptBlock]] - def defaultHighlight(buffer: Vector[Char], - comment: fansi.Attrs, - `type`: fansi.Attrs, - literal: fansi.Attrs, - keyword: fansi.Attrs, - reset: fansi.Attrs, - notImplemented: fansi.Attrs): Vector[Char] + def defaultHighlight( + buffer: Vector[Char], + comment: fansi.Attrs, + `type`: fansi.Attrs, + literal: fansi.Attrs, + keyword: fansi.Attrs, + reset: fansi.Attrs, + notImplemented: fansi.Attrs + ): Vector[Char] def isObjDef(code: String): Boolean } @@ -51,27 +53,27 @@ abstract class Parser { object Parser { case class ParsedImportHooks( - hookStatements: Seq[String], - importTrees: Seq[ImportTree] + hookStatements: Seq[String], + importTrees: Seq[ImportTree] ) case class ScriptBlock( - startIndex: Int, - ncomment: String, - codeWithStartIndices: Seq[(Int, String)] + startIndex: Int, + ncomment: String, + codeWithStartIndices: Seq[(Int, String)] ) object ScriptBlock { def apply( - ncomment: String, - codeWithStartIndices: Seq[(Int, String)] + ncomment: String, + codeWithStartIndices: Seq[(Int, String)] ): ScriptBlock = ScriptBlock(0, ncomment, codeWithStartIndices) } class ScriptSplittingError( - message: String, - val index: Int = -1, - val expected: String = "" + message: String, + val index: Int = -1, + val expected: String = "" ) extends Exception(message) } diff --git a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Preprocessor.scala b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Preprocessor.scala index 0c7843b32..1d9d1f2c3 100644 --- a/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Preprocessor.scala +++ 
b/amm/compiler/interface/src/main/scala/ammonite/compiler/iface/Preprocessor.scala @@ -4,37 +4,37 @@ import ammonite.util.{Imports, Name, Res} import ammonite.util.Util.CodeSource /** - * Responsible for all scala-source-code-munging that happens within the - * Ammonite REPL. - * - * Performs several tasks: - * - * - Takes top-level Scala expressions and assigns them to `res{1, 2, 3, ...}` - * values so they can be accessed later in the REPL - * - * - Wraps the code snippet with an wrapper `object` since Scala doesn't allow - * top-level expressions - * - * - Mangles imports from our [[ammonite.util.ImportData]] data structure into a source - * String - * - * - Combines all of these into a complete compilation unit ready to feed into - * the Scala compiler - */ + * Responsible for all scala-source-code-munging that happens within the + * Ammonite REPL. + * + * Performs several tasks: + * + * - Takes top-level Scala expressions and assigns them to `res{1, 2, 3, ...}` + * values so they can be accessed later in the REPL + * + * - Wraps the code snippet with an wrapper `object` since Scala doesn't allow + * top-level expressions + * + * - Mangles imports from our [[ammonite.util.ImportData]] data structure into a source + * String + * + * - Combines all of these into a complete compilation unit ready to feed into + * the Scala compiler + */ abstract class Preprocessor { def transform( - stmts: Seq[String], - resultIndex: String, - leadingSpaces: String, - codeSource: CodeSource, - indexedWrapperName: Name, - imports: Imports, - printerTemplate: String => String, - extraCode: String, - skipEmpty: Boolean, - markScript: Boolean, - codeWrapper: CodeWrapper + stmts: Seq[String], + resultIndex: String, + leadingSpaces: String, + codeSource: CodeSource, + indexedWrapperName: Name, + imports: Imports, + printerTemplate: String => String, + extraCode: String, + skipEmpty: Boolean, + markScript: Boolean, + codeWrapper: CodeWrapper ): Res[Preprocessor.Output] } @@ -42,9 +42,9 @@ abstract class Preprocessor { object Preprocessor { case class Output( - code: String, - prefixCharLength: Int, - userCodeNestingLevel: Int + code: String, + prefixCharLength: Int, + userCodeNestingLevel: Int ) } diff --git a/amm/compiler/src/main/scala-2.12.0_12/ammonite/compiler/CompilerCompatibility.scala b/amm/compiler/src/main/scala-2.12.0_12/ammonite/compiler/CompilerCompatibility.scala index 1a109e902..ea2885020 100644 --- a/amm/compiler/src/main/scala-2.12.0_12/ammonite/compiler/CompilerCompatibility.scala +++ b/amm/compiler/src/main/scala-2.12.0_12/ammonite/compiler/CompilerCompatibility.scala @@ -19,8 +19,10 @@ object CompilerCompatibility extends ExtraCompilerCompatibility { } } - def interactiveAnalyzer(g: InteractiveGlobal, - cl: ClassLoader): InteractiveAnalyzer { val global: g.type } = { + def interactiveAnalyzer( + g: InteractiveGlobal, + cl: ClassLoader + ): InteractiveAnalyzer { val global: g.type } = { new { val global: g.type = g } with InteractiveAnalyzer { override def findMacroClassLoader() = cl } @@ -28,13 +30,13 @@ object CompilerCompatibility extends ExtraCompilerCompatibility { def importInfo(g: Global)(t: g.Import) = new g.analyzer.ImportInfo(t, 0) - - - def initGlobal(settings: Settings, - reporter: AbstractReporter, - jcp: AggregateClassPath, - evalClassloader: ClassLoader, - createPlugins: Global => List[Plugin]): Global = { + def initGlobal( + settings: Settings, + reporter: AbstractReporter, + jcp: AggregateClassPath, + evalClassloader: ClassLoader, + createPlugins: Global => List[Plugin] + ): 
Global = { new nsc.Global(settings, reporter) { g => override lazy val plugins = createPlugins(g) diff --git a/amm/compiler/src/main/scala-2.12.10-2.13.1+/scala/tools/nsc/AmmClassPath.scala b/amm/compiler/src/main/scala-2.12.10-2.13.1+/scala/tools/nsc/AmmClassPath.scala index 3efa67977..912f60dc5 100644 --- a/amm/compiler/src/main/scala-2.12.10-2.13.1+/scala/tools/nsc/AmmClassPath.scala +++ b/amm/compiler/src/main/scala-2.12.10-2.13.1+/scala/tools/nsc/AmmClassPath.scala @@ -22,13 +22,11 @@ trait AmmClassPath extends ClassPath { ammList(inPackage.dottedString) } - def ammClasses(inPackage: String): Seq[ClassFileEntry] def classes(inPackage: scala.tools.nsc.classpath.PackageName): Seq[ClassFileEntry] = { ammClasses(inPackage.dottedString) } - def ammHasPackage(pkg: String): Boolean def hasPackage(pkg: scala.tools.nsc.classpath.PackageName) = ammHasPackage(pkg.dottedString) } diff --git a/amm/compiler/src/main/scala-2.12.10-2.13.1+/scala/tools/nsc/WhiteListClasspath.scala b/amm/compiler/src/main/scala-2.12.10-2.13.1+/scala/tools/nsc/WhiteListClasspath.scala index 88905888b..ae955b0f0 100644 --- a/amm/compiler/src/main/scala-2.12.10-2.13.1+/scala/tools/nsc/WhiteListClasspath.scala +++ b/amm/compiler/src/main/scala-2.12.10-2.13.1+/scala/tools/nsc/WhiteListClasspath.scala @@ -6,21 +6,20 @@ import scala.tools.nsc.classpath.ClassPathEntries import scala.tools.nsc.util.ClassPath class WhiteListClasspath(aggregates: Seq[ClassPath], whitelist: Set[Seq[String]]) - extends scala.tools.nsc.classpath.AggregateClassPath(aggregates) { + extends scala.tools.nsc.classpath.AggregateClassPath(aggregates) { override def findClassFile(name: String) = { val tokens = name.split('.') if (Util.lookupWhiteList(whitelist, tokens.init ++ Seq(tokens.last + ".class"))) { super.findClassFile(name) - } - else None + } else None } override def list(inPackage: scala.tools.nsc.classpath.PackageName) = { val superList = super.list(inPackage) ClassPathEntries( - superList.packages.filter{ p => Util.lookupWhiteList(whitelist, p.name.split('.')) }, - superList.classesAndSources.filter{ t => + superList.packages.filter { p => Util.lookupWhiteList(whitelist, p.name.split('.')) }, + superList.classesAndSources.filter { t => Util.lookupWhiteList(whitelist, inPackage.dottedString.split('.') ++ Seq(t.name + ".class")) } ) } -} \ No newline at end of file +} diff --git a/amm/compiler/src/main/scala-2.12.13+/ammonite/compiler/CompilerCompatibility.scala b/amm/compiler/src/main/scala-2.12.13+/ammonite/compiler/CompilerCompatibility.scala index c36762350..115f6deee 100644 --- a/amm/compiler/src/main/scala-2.12.13+/ammonite/compiler/CompilerCompatibility.scala +++ b/amm/compiler/src/main/scala-2.12.13+/ammonite/compiler/CompilerCompatibility.scala @@ -20,19 +20,23 @@ object CompilerCompatibility { } } - def interactiveAnalyzer(g: InteractiveGlobal, - cl: ClassLoader): InteractiveAnalyzer { val global: g.type } = { + def interactiveAnalyzer( + g: InteractiveGlobal, + cl: ClassLoader + ): InteractiveAnalyzer { val global: g.type } = { new { val global: g.type = g } with InteractiveAnalyzer } def importInfo(g: Global)(t: g.Import) = new g.analyzer.ImportInfo(t, 0) - def initGlobal(settings: Settings, - reporter: Reporter, - jcp: AggregateClassPath, - evalClassloader: ClassLoader, - createPlugins: Global => List[Plugin]): Global = { + def initGlobal( + settings: Settings, + reporter: Reporter, + jcp: AggregateClassPath, + evalClassloader: ClassLoader, + createPlugins: Global => List[Plugin] + ): Global = { new nsc.Global(settings, reporter) { g 
=> override lazy val plugins = createPlugins(g) diff --git a/amm/compiler/src/main/scala-2.12.13+/ammonite/compiler/MakeReporter.scala b/amm/compiler/src/main/scala-2.12.13+/ammonite/compiler/MakeReporter.scala index 3b9fbf5f2..b9f2a70a6 100644 --- a/amm/compiler/src/main/scala-2.12.13+/ammonite/compiler/MakeReporter.scala +++ b/amm/compiler/src/main/scala-2.12.13+/ammonite/compiler/MakeReporter.scala @@ -16,19 +16,23 @@ object MakeReporter { type Reporter = scala.tools.nsc.reporters.Reporter - def makeReporter(errorLogger: (Position, String) => Unit, - warningLogger: (Position, String) => Unit, - infoLogger: (Position, String) => Unit, - outerSettings: Settings): Reporter = + def makeReporter( + errorLogger: (Position, String) => Unit, + warningLogger: (Position, String) => Unit, + infoLogger: (Position, String) => Unit, + outerSettings: Settings + ): Reporter = new FilteringReporter { - def doReport(pos: scala.reflect.internal.util.Position, - msg: String, - severity: Severity): Unit = + def doReport( + pos: scala.reflect.internal.util.Position, + msg: String, + severity: Severity + ): Unit = display(pos, msg, severity) def display(pos: Position, msg: String, severity: Severity) = - severity match{ + severity match { case ERROR => Classpath.traceClasspathProblem(s"ERROR: $msg") errorLogger(pos, msg) diff --git a/amm/compiler/src/main/scala-2.13.1-2.13.11/ammonite/compiler/MakeReporter.scala b/amm/compiler/src/main/scala-2.13.1-2.13.11/ammonite/compiler/MakeReporter.scala index 3b9fbf5f2..b9f2a70a6 100644 --- a/amm/compiler/src/main/scala-2.13.1-2.13.11/ammonite/compiler/MakeReporter.scala +++ b/amm/compiler/src/main/scala-2.13.1-2.13.11/ammonite/compiler/MakeReporter.scala @@ -16,19 +16,23 @@ object MakeReporter { type Reporter = scala.tools.nsc.reporters.Reporter - def makeReporter(errorLogger: (Position, String) => Unit, - warningLogger: (Position, String) => Unit, - infoLogger: (Position, String) => Unit, - outerSettings: Settings): Reporter = + def makeReporter( + errorLogger: (Position, String) => Unit, + warningLogger: (Position, String) => Unit, + infoLogger: (Position, String) => Unit, + outerSettings: Settings + ): Reporter = new FilteringReporter { - def doReport(pos: scala.reflect.internal.util.Position, - msg: String, - severity: Severity): Unit = + def doReport( + pos: scala.reflect.internal.util.Position, + msg: String, + severity: Severity + ): Unit = display(pos, msg, severity) def display(pos: Position, msg: String, severity: Severity) = - severity match{ + severity match { case ERROR => Classpath.traceClasspathProblem(s"ERROR: $msg") errorLogger(pos, msg) diff --git a/amm/compiler/src/main/scala-2.13.12+/ammonite/compiler/MakeReporter.scala b/amm/compiler/src/main/scala-2.13.12+/ammonite/compiler/MakeReporter.scala index 02a864110..7abdf32ed 100644 --- a/amm/compiler/src/main/scala-2.13.12+/ammonite/compiler/MakeReporter.scala +++ b/amm/compiler/src/main/scala-2.13.12+/ammonite/compiler/MakeReporter.scala @@ -16,21 +16,24 @@ object MakeReporter { type Reporter = scala.tools.nsc.reporters.Reporter - def makeReporter(errorLogger: (Position, String) => Unit, - warningLogger: (Position, String) => Unit, - infoLogger: (Position, String) => Unit, - outerSettings: Settings): Reporter = + def makeReporter( + errorLogger: (Position, String) => Unit, + warningLogger: (Position, String) => Unit, + infoLogger: (Position, String) => Unit, + outerSettings: Settings + ): Reporter = new FilteringReporter { - override - def doReport(pos: scala.reflect.internal.util.Position, - msg: 
String, - severity: Severity, - actions: List[CodeAction]): Unit = + override def doReport( + pos: scala.reflect.internal.util.Position, + msg: String, + severity: Severity, + actions: List[CodeAction] + ): Unit = display(pos, msg, severity) def display(pos: Position, msg: String, severity: Severity) = - severity match{ + severity match { case ERROR => Classpath.traceClasspathProblem(s"ERROR: $msg") errorLogger(pos, msg) diff --git a/amm/compiler/src/main/scala-2.13/ammonite/compiler/CompilerCompatibility.scala b/amm/compiler/src/main/scala-2.13/ammonite/compiler/CompilerCompatibility.scala index 0e36feaf5..ede3cb5ef 100644 --- a/amm/compiler/src/main/scala-2.13/ammonite/compiler/CompilerCompatibility.scala +++ b/amm/compiler/src/main/scala-2.13/ammonite/compiler/CompilerCompatibility.scala @@ -31,20 +31,23 @@ object CompilerCompatibility { } } - def interactiveAnalyzer(g: InteractiveGlobal, - cl: ClassLoader): InteractiveAnalyzer { val global: g.type } = { - new { val global: g.type = g } with InteractiveAnalyzer { - } + def interactiveAnalyzer( + g: InteractiveGlobal, + cl: ClassLoader + ): InteractiveAnalyzer { val global: g.type } = { + new { val global: g.type = g } with InteractiveAnalyzer {} } def importInfo(g: Global)(t: g.Import) = new g.analyzer.ImportInfo(t, 0, false) - def initGlobal(settings: Settings, - reporter: Reporter, - jcp: AggregateClassPath, - evalClassloader: ClassLoader, - createPlugins: Global => List[Plugin]): Global = { + def initGlobal( + settings: Settings, + reporter: Reporter, + jcp: AggregateClassPath, + evalClassloader: ClassLoader, + createPlugins: Global => List[Plugin] + ): Global = { new nsc.Global(settings, reporter) { g => override lazy val plugins = createPlugins(g) diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/AmmonitePlugin.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/AmmonitePlugin.scala index 4190e1bc2..b8d7755ff 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/AmmonitePlugin.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/AmmonitePlugin.scala @@ -12,12 +12,14 @@ import scala.reflect.internal.util._ * to the `output` function. 
Needs to be a compiler plugin so we can hook in * immediately after the `typer` */ -class AmmonitePlugin(g: scala.tools.nsc.Global, - output: Seq[ImportData] => Unit, - usedEarlierDefinitions: Seq[String] => Unit, - userCodeNestingLevel: => Int, - topWrapperLen: => Int, - lineNumberModifier: Boolean) extends Plugin{ +class AmmonitePlugin( + g: scala.tools.nsc.Global, + output: Seq[ImportData] => Unit, + usedEarlierDefinitions: Seq[String] => Unit, + userCodeNestingLevel: => Int, + topWrapperLen: => Int, + lineNumberModifier: Boolean +) extends Plugin { val name: String = "AmmonitePlugin" val global: Global = g val description: String = "Extracts the names in scope for the Ammonite REPL to use" @@ -58,7 +60,11 @@ class AmmonitePlugin(g: scala.tools.nsc.Global, def apply(unit: g.CompilationUnit): Unit = { val things = global.currentRun.units.map(_.source.path).toList AmmonitePlugin(g)( - unit, output, usedEarlierDefinitions, userCodeNestingLevel, topWrapperLen + unit, + output, + usedEarlierDefinitions, + userCodeNestingLevel, + topWrapperLen ) } } @@ -68,16 +74,15 @@ class AmmonitePlugin(g: scala.tools.nsc.Global, } } - -object AmmonitePlugin{ +object AmmonitePlugin { var count = 0 - def apply(g: Global) - (unit: g.CompilationUnit, - output: Seq[ImportData] => Unit, - usedEarlierDefinitions: Seq[String] => Unit, - userCodeNestingLevel: => Int, - topWrapperLen: => Int) = { - + def apply(g: Global)( + unit: g.CompilationUnit, + output: Seq[ImportData] => Unit, + usedEarlierDefinitions: Seq[String] => Unit, + userCodeNestingLevel: => Int, + topWrapperLen: => Int + ) = { count += 1 def decode(t: g.Tree) = { @@ -104,11 +109,11 @@ object AmmonitePlugin{ userCodeNestingLevel match { case 1 => - /* - * We don't try to determine what previous commands are actually used here. - * userCodeNestingLevel == 1 likely corresponds to the default object-based - * code wrapper, which doesn't rely on the actually used previous commands. - */ + /* + * We don't try to determine what previous commands are actually used here. + * userCodeNestingLevel == 1 likely corresponds to the default object-based + * code wrapper, which doesn't rely on the actually used previous commands. + */ case 2 => /* @@ -155,110 +160,108 @@ object AmmonitePlugin{ } val symbols = stats.filter(x => !Option(x.symbol).exists(_.isPrivate)) - .foldLeft(List.empty[(Boolean, String, String, Seq[Name])]){ - // These are all the ways we want to import names from previous - // executions into the current one. Most are straightforward, except - // `import` statements for which we make use of the typechecker to - // resolve the imported names - case (ctx, t @ g.Import(expr, selectors)) => - - def rec(expr: g.Tree): List[(g.Name, g.Symbol)] = { - expr match { - case s @ g.Select(lhs, _) => (s.symbol.name -> s.symbol) :: rec(lhs) - case i @ g.Ident(name) => List(name -> i.symbol) - case t @ g.This(pkg) => List(pkg -> t.symbol) + .foldLeft(List.empty[(Boolean, String, String, Seq[Name])]) { + // These are all the ways we want to import names from previous + // executions into the current one. 
Most are straightforward, except + // `import` statements for which we make use of the typechecker to + // resolve the imported names + case (ctx, t @ g.Import(expr, selectors)) => + def rec(expr: g.Tree): List[(g.Name, g.Symbol)] = { + expr match { + case s @ g.Select(lhs, _) => (s.symbol.name -> s.symbol) :: rec(lhs) + case i @ g.Ident(name) => List(name -> i.symbol) + case t @ g.This(pkg) => List(pkg -> t.symbol) + } } - } - val (nameList, symbolList) = rec(expr).reverse.unzip - - // Note: we need to take the symbol on the left-most name and get it's - // `.fullName`. Otherwise if we're in - // - // ``` - // package foo.bar.baz - // object Wrapper{val x = ...; import x._} - // ``` - // - // The import will get treated as from `Wrapper.x`, but the person - // running that import will not be in package `foo.bar.baz` and will - // not be able to find `Wrapper`! Thus we need to get the full name. - // In cases where the left-most name is a top-level package, - // `.fullName` is basically a no-op and it works as intended. - // - // Apart from this, all other imports should resolve either to one - // of these cases or importing-from-an-existing import, both of which - // should work without modification - - val headFullPath = NameTransformer.decode(symbolList.head.fullName).split('.').map(Name(_)) - // prefix package imports with `_root_` to try and stop random - // variables from interfering with them. If someone defines a value - // called `_root_`, this will still break, but that's their problem - val rootPrefix = if(symbolList.head.isPackage) Seq(Name("_root_")) else Nil - val tailPath = nameList.tail.map(_.decoded).map(Name(_)) - - val prefix = rootPrefix ++ headFullPath ++ tailPath - - /** - * A map of each name importable from `expr`, to a `Seq[Boolean]` - * containing a `true` if there's a type-symbol you can import, `false` - * if there's a non-type symbol and both if there are both type and - * non-type symbols that are importable for that name - */ - val importableIsTypes = - expr.tpe + val (nameList, symbolList) = rec(expr).reverse.unzip + + // Note: we need to take the symbol on the left-most name and get it's + // `.fullName`. Otherwise if we're in + // + // ``` + // package foo.bar.baz + // object Wrapper{val x = ...; import x._} + // ``` + // + // The import will get treated as from `Wrapper.x`, but the person + // running that import will not be in package `foo.bar.baz` and will + // not be able to find `Wrapper`! Thus we need to get the full name. + // In cases where the left-most name is a top-level package, + // `.fullName` is basically a no-op and it works as intended. + // + // Apart from this, all other imports should resolve either to one + // of these cases or importing-from-an-existing import, both of which + // should work without modification + + val headFullPath = + NameTransformer.decode(symbolList.head.fullName).split('.').map(Name(_)) + // prefix package imports with `_root_` to try and stop random + // variables from interfering with them. 
If someone defines a value + // called `_root_`, this will still break, but that's their problem + val rootPrefix = if (symbolList.head.isPackage) Seq(Name("_root_")) else Nil + val tailPath = nameList.tail.map(_.decoded).map(Name(_)) + + val prefix = rootPrefix ++ headFullPath ++ tailPath + + /** + * A map of each name importable from `expr`, to a `Seq[Boolean]` + * containing a `true` if there's a type-symbol you can import, `false` + * if there's a non-type symbol and both if there are both type and + * non-type symbols that are importable for that name + */ + val importableIsTypes = + expr.tpe .members .filter(saneSym(_)) .groupBy(_.name.decoded) .mapValues(_.map(_.isType).toVector) + val renamings = for { + t @ g.ImportSelector(name, _, rename, _) <- selectors + isType <- importableIsTypes.getOrElse(name.decode, Nil) // getOrElse just in case... + } yield Option(rename).map(x => name.decoded -> (isType, x.decoded)) - val renamings = for{ - t @ g.ImportSelector(name, _, rename, _) <- selectors - isType <- importableIsTypes.getOrElse(name.decode, Nil) // getOrElse just in case... - } yield Option(rename).map(x => name.decoded -> (isType, x.decoded)) + val renameMap = renamings.flatten.map(_.swap).toMap + val info = CompilerCompatibility.importInfo(g)(t) - val renameMap = renamings.flatten.map(_.swap).toMap - val info = CompilerCompatibility.importInfo(g)(t) - - val symNames = for { - sym <- info.allImportedSymbols - if saneSym(sym) - } yield { - (sym.isType, sym.decodedName) - } + val symNames = for { + sym <- info.allImportedSymbols + if saneSym(sym) + } yield { + (sym.isType, sym.decodedName) + } - val syms = for{ - // For some reason `info.allImportedSymbols` does not show imported - // type aliases when they are imported directly e.g. - // - // import scala.reflect.macros.Context - // - // As opposed to via import scala.reflect.macros._. - // Thus we need to combine allImportedSymbols with the renameMap - (isType, sym) <- (symNames.toList ++ renameMap.keys).distinct - } yield { - (isType, renameMap.getOrElse((isType, sym), sym), sym, prefix) - } - syms ::: ctx - case (ctx, t @ g.DefDef(_, _, _, _, _, _)) => decode(t) :: ctx - case (ctx, t @ g.ValDef(_, _, _, _)) => decode(t) :: ctx - case (ctx, t @ g.ClassDef(_, _, _, _)) => decode(t) :: ctx - case (ctx, t @ g.ModuleDef(_, _, _)) => decode(t) :: ctx - case (ctx, t @ g.TypeDef(_, _, _, _)) => decode(t) :: ctx - case (ctx, t) => ctx - } + val syms = for { + // For some reason `info.allImportedSymbols` does not show imported + // type aliases when they are imported directly e.g. + // + // import scala.reflect.macros.Context + // + // As opposed to via import scala.reflect.macros._. 
+ // Thus we need to combine allImportedSymbols with the renameMap + (isType, sym) <- (symNames.toList ++ renameMap.keys).distinct + } yield { + (isType, renameMap.getOrElse((isType, sym), sym), sym, prefix) + } + syms ::: ctx + case (ctx, t @ g.DefDef(_, _, _, _, _, _)) => decode(t) :: ctx + case (ctx, t @ g.ValDef(_, _, _, _)) => decode(t) :: ctx + case (ctx, t @ g.ClassDef(_, _, _, _)) => decode(t) :: ctx + case (ctx, t @ g.ModuleDef(_, _, _)) => decode(t) :: ctx + case (ctx, t @ g.TypeDef(_, _, _, _)) => decode(t) :: ctx + case (ctx, t) => ctx + } val grouped = symbols.distinct - .groupBy{case (a, b, c, d) => (b, c, d) } - .mapValues(_.map(_._1)) - + .groupBy { case (a, b, c, d) => (b, c, d) } + .mapValues(_.map(_._1)) val open = for { ((fromName, toName, importString), items) <- grouped if !CompilerUtil.ignoredNames(fromName) } yield { - val importType = items match{ + val importType = items match { case Seq(true) => ImportData.Type case Seq(false) => ImportData.Term case Seq(_, _) => ImportData.TermType @@ -277,16 +280,14 @@ object AmmonitePlugin{ } } - object LineNumberModifier { - def apply(g: Global)(unit: g.CompilationUnit, - topWrapperLen: => Int) = { + def apply(g: Global)(unit: g.CompilationUnit, topWrapperLen: => Int) = { object LineNumberCorrector extends g.Transformer { import scala.reflect.internal.util._ - private val trimmedSource = new BatchSourceFile(g.currentSource.file, - g.currentSource.content.drop(topWrapperLen)) + private val trimmedSource = + new BatchSourceFile(g.currentSource.file, g.currentSource.content.drop(topWrapperLen)) override def transform(tree: g.Tree) = { val transformedTree = super.transform(tree) @@ -299,22 +300,22 @@ object LineNumberModifier { // The ticket https://github.com/scala/scala-dev/issues/390 tracks down // relaxing the aggressive validation. val newPos = tree.pos match { - case s : TransparentPosition if s.start > topWrapperLen => - new TransparentPosition( - trimmedSource, - s.start - topWrapperLen, - s.point - topWrapperLen, - s.end - topWrapperLen - ) + case s: TransparentPosition if s.start > topWrapperLen => + new TransparentPosition( + trimmedSource, + s.start - topWrapperLen, + s.point - topWrapperLen, + s.end - topWrapperLen + ) case s: RangePosition if s.start > topWrapperLen => - new RangePosition( - trimmedSource, - s.start - topWrapperLen, - s.point - topWrapperLen, - s.end - topWrapperLen - ) + new RangePosition( + trimmedSource, + s.start - topWrapperLen, + s.point - topWrapperLen, + s.end - topWrapperLen + ) case s: OffsetPosition if s.start > topWrapperLen => - new OffsetPosition(trimmedSource, s.point - topWrapperLen) + new OffsetPosition(trimmedSource, s.point - topWrapperLen) case s => s } diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/Compiler.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/Compiler.scala index 2c68e989d..db0e6f461 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/Compiler.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/Compiler.scala @@ -1,6 +1,5 @@ package ammonite.compiler - import java.io.OutputStream import java.nio.file.{Files, Path, Paths} @@ -23,7 +22,6 @@ import scala.tools.nsc.{CustomZipAndJarFileLookupFactory, Global, Settings} import scala.tools.nsc.interactive.Response import scala.tools.nsc.plugins.Plugin - /** * Encapsulates (almost) all the ickiness of Scalac so it doesn't leak into * the rest of the codebase. 
Makes use of a good amount of mutable state @@ -41,16 +39,19 @@ trait Compiler extends ICompiler { var userCodeNestingLevel = -1 } -object Compiler{ +object Compiler { + /** - * Writes files to dynamicClasspath. Needed for loading cached classes. - */ - def addToClasspath(classFiles: Traversable[(String, Array[Byte])], - dynamicClasspath: VirtualDirectory, - outputDir: Option[Path]): Unit = { + * Writes files to dynamicClasspath. Needed for loading cached classes. + */ + def addToClasspath( + classFiles: Traversable[(String, Array[Byte])], + dynamicClasspath: VirtualDirectory, + outputDir: Option[Path] + ): Unit = { val outputDir0 = outputDir.map(os.Path(_, os.pwd)) - for((name, bytes) <- classFiles){ + for ((name, bytes) <- classFiles) { val elems = name.split('/').toList val output = writeDeep(dynamicClasspath, elems) output.write(bytes) @@ -62,8 +63,7 @@ object Compiler{ } - def writeDeep(d: VirtualDirectory, - path: List[String]): OutputStream = path match { + def writeDeep(d: VirtualDirectory, path: List[String]): OutputStream = path match { case head :: Nil => d.fileNamed(head).output case head :: rest => writeDeep( @@ -76,8 +76,8 @@ object Compiler{ } /** - * Converts a bunch of bytes into Scalac's weird VirtualFile class - */ + * Converts a bunch of bytes into Scalac's weird VirtualFile class + */ def makeFile(src: Array[Byte], name: String) = { val segments = name.split("/", -1) @@ -101,24 +101,24 @@ object Compiler{ ) } - - - def apply(classpath: Seq[java.net.URL], - dynamicClasspath: VirtualDirectory, - outputDir: Option[Path], - evalClassloader: => ClassLoader, - pluginClassloader: => ClassLoader, - shutdownPressy: () => Unit, - reporterOpt: Option[MakeReporter.Reporter], - settings: Settings, - classPathWhitelist: Set[Seq[String]], - initialClassPath: Seq[java.net.URL], - lineNumberModifier: Boolean = true): Compiler = new Compiler{ + def apply( + classpath: Seq[java.net.URL], + dynamicClasspath: VirtualDirectory, + outputDir: Option[Path], + evalClassloader: => ClassLoader, + pluginClassloader: => ClassLoader, + shutdownPressy: () => Unit, + reporterOpt: Option[MakeReporter.Reporter], + settings: Settings, + classPathWhitelist: Set[Seq[String]], + initialClassPath: Seq[java.net.URL], + lineNumberModifier: Boolean = true + ): Compiler = new Compiler { def preprocessor(fileName: String, markGeneratedSections: Boolean): Preprocessor = new DefaultPreprocessor(parse(fileName, _), markGeneratedSections) - if(sys.env.contains("DIE"))??? + if (sys.env.contains("DIE")) ??? 
val PluginXML = "scalac-plugin.xml" lazy val plugins0 = { import scala.collection.JavaConverters._ @@ -152,13 +152,13 @@ object Compiler{ catch { case _: ClassNotFoundException => None } } yield (name, className, classOpt) - val notFound = plugins.collect{case (name, className, None) => (name, className) } + val notFound = plugins.collect { case (name, className, None) => (name, className) } if (notFound.nonEmpty) { for ((name, className) <- notFound.sortBy(_._1)) Console.err.println(s"Implementation $className of plugin $name not found.") } - plugins.collect{case (name, _, Some(cls)) => name -> cls } + plugins.collect { case (name, _, Some(cls)) => name -> cls } } var errorLogger: String => Unit = s => () @@ -208,7 +208,9 @@ object Compiler{ } val scalac = CompilerCompatibility.initGlobal( - settings, reporter, jcp, + settings, + reporter, + jcp, evalClassloader, createPlugins = g => { List( @@ -233,11 +235,14 @@ object Compiler{ (name, cls) <- plugins0 plugin = Plugin.instantiate(cls, g) initOk = - try plugin.init(pluginSettings.getOrElse(name, Nil), g.globalError) - catch { case ex: Exception => - Console.err.println(s"Warning: disabling plugin $name, initialization failed: $ex") - false - } + try plugin.init(pluginSettings.getOrElse(name, Nil), g.globalError) + catch { + case ex: Exception => + Console.err.println( + s"Warning: disabling plugin $name, initialization failed: $ex" + ) + false + } if initOk } yield plugin } @@ -253,26 +258,25 @@ object Compiler{ (vd, reporter, scalac) } - /** * Compiles a blob of bytes and spits of a list of classfiles - * importsLen0 is the length of topWrapper appended above code by wrappedCode function - * It is passed to AmmonitePlugin to decrease this much offset from each AST node - * corresponding to the actual code so as to correct the line numbers in error report + * importsLen0 is the length of topWrapper appended above code by wrappedCode function + * It is passed to AmmonitePlugin to decrease this much offset from each AST node + * corresponding to the actual code so as to correct the line numbers in error report */ - def compile(src: Array[Byte], - printer: Printer, - importsLen0: Int, - userCodeNestingLevel: Int, - fileName: String): Option[ICompiler.Output] = { + def compile( + src: Array[Byte], + printer: Printer, + importsLen0: Int, + userCodeNestingLevel: Int, + fileName: String + ): Option[ICompiler.Output] = { def enumerateVdFiles(d: VirtualDirectory): Iterator[AbstractFile] = { val (subs, files) = d.iterator.partition(_.isDirectory) files ++ subs.map(_.asInstanceOf[VirtualDirectory]).flatMap(enumerateVdFiles) } - - compiler.reporter.reset() this.errorLogger = printer.error this.warningLogger = printer.warning @@ -292,7 +296,7 @@ object Compiler{ shutdownPressy() - val files = for(x <- outputFiles if x.name.endsWith(".class")) yield { + val files = for (x <- outputFiles if x.name.endsWith(".class")) yield { val segments = x.path.split("/").toList.tail val output = writeDeep(dynamicClasspath, segments) output.write(x.toByteArray) @@ -314,7 +318,6 @@ object Compiler{ } } - def parse(fileName: String, line: String): Either[String, Seq[Global#Tree]] = { val errors = mutable.Buffer.empty[String] val warnings = mutable.Buffer.empty[String] @@ -331,7 +334,6 @@ object Compiler{ } } - def prepareJarCp(jarDeps: Seq[java.net.URL], settings: Settings) = { jarDeps.filter(x => x.getPath.endsWith(".jar") || Classpath.canBeOpenedAsJar(x)) .flatMap { x => @@ -340,8 +342,7 @@ object Compiler{ if (Files.exists(path)) { val arc = new 
FileZipArchive(path.toFile) Seq(CompilerCompatibility.createZipJarFactory(arc, settings)) - } - else + } else Nil } else { val arc = new internal.CustomURLZipArchive(x) @@ -359,17 +360,20 @@ object Compiler{ Nil } } + /** - * Code to initialize random bits and pieces that are needed - * for the Scala compiler to function, common between the - * normal and presentation compiler - */ - def initGlobalClasspath(dirDeps: Seq[java.net.URL], - jarDeps: Seq[java.net.URL], - dynamicClasspath: VirtualDirectory, - settings: Settings, - classPathWhitelist: Set[Seq[String]], - initialClassPath: Seq[java.net.URL]) = { + * Code to initialize random bits and pieces that are needed + * for the Scala compiler to function, common between the + * normal and presentation compiler + */ + def initGlobalClasspath( + dirDeps: Seq[java.net.URL], + jarDeps: Seq[java.net.URL], + dynamicClasspath: VirtualDirectory, + settings: Settings, + classPathWhitelist: Set[Seq[String]], + initialClassPath: Seq[java.net.URL] + ) = { val (initialDirDeps, newDirDeps) = dirDeps.partition(initialClassPath.contains) val (initialJarDeps, newJarDeps) = jarDeps.partition(initialClassPath.contains) @@ -378,7 +382,7 @@ object Compiler{ val newDirCp = prepareDirCp(newDirDeps) val initialDirCp = prepareDirCp(initialDirDeps) - val dynamicCP = new VirtualDirectoryClassPath(dynamicClasspath){ + val dynamicCP = new VirtualDirectoryClassPath(dynamicClasspath) { override def getSubDir(packageDirName: String): Option[AbstractFile] = { val pathParts = packageDirName.split('/') @@ -413,7 +417,6 @@ object Compiler{ ) val jcp = new AggregateClassPath(Seq(staticCP, dynamicCP) ++ newJarCp ++ newDirCp) - jcp } } diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerBuilder.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerBuilder.scala index f3eaf3fe6..8655404d5 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerBuilder.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerBuilder.scala @@ -3,10 +3,7 @@ package ammonite.compiler import java.net.URL import java.nio.file.Path -import ammonite.compiler.iface.{ - Compiler => ICompiler, - CompilerBuilder => ICompilerBuilder -} +import ammonite.compiler.iface.{Compiler => ICompiler, CompilerBuilder => ICompilerBuilder} import ammonite.util.Frame import scala.collection.mutable @@ -14,20 +11,19 @@ import scala.reflect.internal.util.{NoPosition, Position} import scala.reflect.io.VirtualDirectory import scala.tools.nsc.Settings - case class CompilerBuilder( - outputDir: Option[Path] = None + outputDir: Option[Path] = None ) extends ICompilerBuilder { def create( - initialClassPath: Seq[URL], - classPath: Seq[URL], - dynamicClassPath: Seq[(String, Array[Byte])], - evalClassLoader: ClassLoader, - pluginClassLoader: ClassLoader, - reporter: Option[ICompilerBuilder.Message => Unit], - settings: Seq[String], - classPathWhiteList: Set[Seq[String]], - lineNumberModifier: Boolean + initialClassPath: Seq[URL], + classPath: Seq[URL], + dynamicClassPath: Seq[(String, Array[Byte])], + evalClassLoader: ClassLoader, + pluginClassLoader: ClassLoader, + reporter: Option[ICompilerBuilder.Message => Unit], + settings: Seq[String], + classPathWhiteList: Set[Seq[String]], + lineNumberModifier: Boolean ): ICompiler = { val vd = new VirtualDirectory("(memory)", None) @@ -76,12 +72,12 @@ case class CompilerBuilder( } def newManager( - rtCacheDir: Option[Path], - headFrame: => Frame, - dependencyCompleter: => Option[String => (Int, Seq[String])], - whiteList: Set[Seq[String]], 
- initialClassLoader: ClassLoader, - settings: Seq[String] + rtCacheDir: Option[Path], + headFrame: => Frame, + dependencyCompleter: => Option[String => (Int, Seq[String])], + whiteList: Set[Seq[String]], + initialClassLoader: ClassLoader, + settings: Seq[String] ): CompilerLifecycleManager = new CompilerLifecycleManager( rtCacheDir, diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerExtensions.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerExtensions.scala index b56a234b1..2524d2254 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerExtensions.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerExtensions.scala @@ -12,25 +12,25 @@ object CompilerExtensions { private def compilerManager = api._compilerManager.asInstanceOf[CompilerLifecycleManager] /** - * Configures the current compiler, or if the compiler hasn't been initialized - * yet, registers the configuration callback and applies it to the compiler - * when it ends up being initialized later - */ + * Configures the current compiler, or if the compiler hasn't been initialized + * yet, registers the configuration callback and applies it to the compiler + * when it ends up being initialized later + */ def configureCompiler(c: scala.tools.nsc.Global => Unit): Unit = compilerManager.configureCompiler(c) /** - * Pre-configures the next compiler. Useful for tuning options that are - * used during parsing such as -Yrangepos - */ + * Pre-configures the next compiler. Useful for tuning options that are + * used during parsing such as -Yrangepos + */ def preConfigureCompiler(c: scala.tools.nsc.Settings => Unit): Unit = compilerManager.preConfigureCompiler(c) /** - * Directory where the byte code resulting from compiling the user code is written. - * This is non-empty only if the `--output-directory` or `--tmp-output-directory` options - * are passed to Ammonite upon launch. - */ + * Directory where the byte code resulting from compiling the user code is written. + * This is non-empty only if the `--output-directory` or `--tmp-output-directory` options + * are passed to Ammonite upon launch. + */ def outputDir: Option[Path] = compilerManager.outputDir } @@ -39,7 +39,6 @@ object CompilerExtensions { private def compilerManager = api._compilerManager.asInstanceOf[CompilerLifecycleManager] - /** * Access the compiler to do crazy things if you really want to! */ @@ -47,8 +46,8 @@ object CompilerExtensions { compilerManager.compiler.compiler /** - * Access the presentation compiler to do even crazier things if you really want to! - */ + * Access the presentation compiler to do even crazier things if you really want to! + */ def interactiveCompiler: scala.tools.nsc.interactive.Global = compilerManager.pressy.compiler } diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerLifecycleManager.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerLifecycleManager.scala index 9b4a1c248..d8821e6c0 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerLifecycleManager.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/CompilerLifecycleManager.scala @@ -14,32 +14,30 @@ import scala.collection.mutable import scala.reflect.io.VirtualDirectory import scala.tools.nsc.Settings - /** - * Wraps up the `Compiler` and `Pressy`, ensuring that they get properly - * initialized before use. 
Mostly deals with ensuring the object lifecycles - * are properly dealt with; `Compiler` and `Pressy` are the ones which deal - * with the compiler's nasty APIs - * - * Exposes a simple API where you can just call methods like `compilerClass` - * `configureCompiler` any-how and not worry about ensuring the necessary - * compiler objects are initialized, or worry about initializing them more - * than necessary - */ + * Wraps up the `Compiler` and `Pressy`, ensuring that they get properly + * initialized before use. Mostly deals with ensuring the object lifecycles + * are properly dealt with; `Compiler` and `Pressy` are the ones which deal + * with the compiler's nasty APIs + * + * Exposes a simple API where you can just call methods like `compilerClass` + * `configureCompiler` any-how and not worry about ensuring the necessary + * compiler objects are initialized, or worry about initializing them more + * than necessary + */ class CompilerLifecycleManager( - rtCacheDir: Option[Path], - headFrame: => ammonite.util.Frame, - dependencyCompleteOpt: => Option[String => (Int, Seq[String])], - classPathWhitelist: Set[Seq[String]], - initialClassLoader: ClassLoader, - val outputDir: Option[Path], - initialSettings: Seq[String] + rtCacheDir: Option[Path], + headFrame: => ammonite.util.Frame, + dependencyCompleteOpt: => Option[String => (Int, Seq[String])], + classPathWhitelist: Set[Seq[String]], + initialClassLoader: ClassLoader, + val outputDir: Option[Path], + initialSettings: Seq[String] ) extends ICompilerLifecycleManager { def scalaVersion = scala.util.Properties.versionNumberString - - private[this] object Internal{ + private[this] object Internal { val dynamicClasspath = new VirtualDirectory("(memory)", None) var compiler: Compiler = null val onCompilerInit = mutable.Buffer.empty[scala.tools.nsc.Global => Unit] @@ -50,10 +48,8 @@ class CompilerLifecycleManager( var (lastFrame, lastFrameVersion) = (headFrame, headFrame.version) } - import Internal._ - // Public to expose it in the REPL so people can poke at it at runtime // Not for use within Ammonite! Use one of the other methods to ensure // that `Internal.compiler` is properly initialized before use. @@ -62,12 +58,11 @@ class CompilerLifecycleManager( def pressy: Pressy = Internal.pressy - def preprocess(fileName: String) = synchronized{ + def preprocess(fileName: String) = synchronized { init() compiler.preprocessor(fileName) } - // We lazily force the compiler to be re-initialized by setting the // compilerStale flag. 
Otherwise, if we re-initialized the compiler eagerly, // we end up sometimes re-initializing it multiple times unnecessarily before @@ -75,12 +70,14 @@ class CompilerLifecycleManager( // re-initializations by about 2/3, each of which costs about 30ms and // probably creates a pile of garbage - def init(force: Boolean = false) = synchronized{ - if (compiler == null || - (headFrame ne lastFrame) || - headFrame.version != lastFrameVersion || - Internal.preConfiguredSettingsChanged || - force) { + def init(force: Boolean = false) = synchronized { + if ( + compiler == null || + (headFrame ne lastFrame) || + headFrame.version != lastFrameVersion || + Internal.preConfiguredSettingsChanged || + force + ) { lastFrame = headFrame lastFrameVersion = headFrame.version @@ -89,7 +86,8 @@ class CompilerLifecycleManager( // Otherwise activating autocomplete makes the presentation compiler mangle // the shared settings and makes the main compiler sad val settings = Option(compiler).fold(new Settings)(_.compiler.settings.copy) - val (success, trailingSettings) = settings.processArguments(initialSettings.toList, processAll = true) + val (success, trailingSettings) = + settings.processArguments(initialSettings.toList, processAll = true) if (!success) System.err.println(s"Error processing initial settings ${initialSettings.mkString(" ")}") onSettingsInit.foreach(_(settings)) @@ -129,14 +127,16 @@ class CompilerLifecycleManager( } } - def complete(offset: Int, previousImports: String, snippet: String) = synchronized{ + def complete(offset: Int, previousImports: String, snippet: String) = synchronized { init() pressy.complete(offset, previousImports, snippet) } - def compileClass(processed: Preprocessor.Output, - printer: Printer, - fileName: String): Option[ICompiler.Output] = synchronized{ + def compileClass( + processed: Preprocessor.Output, + printer: Printer, + fileName: String + ): Option[ICompiler.Output] = synchronized { // Enforce the invariant that every piece of code Ammonite ever compiles, // gets run within the `ammonite` package. 
It's further namespaced into // things like `ammonite.$file` or `ammonite.$sess`, but it has to be @@ -155,9 +155,9 @@ class CompilerLifecycleManager( compiled } - def configureCompiler(callback: scala.tools.nsc.Global => Unit) = synchronized{ + def configureCompiler(callback: scala.tools.nsc.Global => Unit) = synchronized { onCompilerInit.append(callback) - if (compiler != null){ + if (compiler != null) { callback(compiler.compiler) } } diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/DefaultPreprocessor.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/DefaultPreprocessor.scala index cd0030e65..e231ddd54 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/DefaultPreprocessor.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/DefaultPreprocessor.scala @@ -17,31 +17,39 @@ object DefaultPreprocessor { } } -class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], - markGeneratedSections: Boolean = false) extends Preprocessor{ +class DefaultPreprocessor( + parse: => String => Either[String, Seq[G#Tree]], + markGeneratedSections: Boolean = false +) extends Preprocessor { import DefaultPreprocessor._ - def transform(stmts: Seq[String], - resultIndex: String, - leadingSpaces: String, - codeSource: CodeSource, - indexedWrapperName: Name, - imports: Imports, - printerTemplate: String => String, - extraCode: String, - skipEmpty: Boolean, - markScript: Boolean, - codeWrapper: CodeWrapper) = { + def transform( + stmts: Seq[String], + resultIndex: String, + leadingSpaces: String, + codeSource: CodeSource, + indexedWrapperName: Name, + imports: Imports, + printerTemplate: String => String, + extraCode: String, + skipEmpty: Boolean, + markScript: Boolean, + codeWrapper: CodeWrapper + ) = { // All code Ammonite compiles must be rooted in some package within // the `ammonite` top-level package assert(codeSource.pkgName.head == Name("ammonite")) - for{ + for { Expanded(code, printer) <- expandStatements(stmts, resultIndex, skipEmpty) (wrappedCode, importsLength, userCodeNestingLevel) = codeWrapper.wrapCode( - codeSource, indexedWrapperName, leadingSpaces + code, + codeSource, + indexedWrapperName, + leadingSpaces + code, printerTemplate(printer.mkString(", ")), - imports, extraCode, markScript + imports, + extraCode, + markScript ) } yield Preprocessor.Output(wrappedCode, importsLength, userCodeNestingLevel) } @@ -73,13 +81,12 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], def pprint(ident: String) = pprintSignature(ident, None) - /** * Processors for declarations which all have the same shape */ def DefProc(definitionLabel: String)(cond: PartialFunction[G#Tree, G#Name]) = (code: String, name: String, tree: G#Tree) => - cond.lift(tree).map{ name => + cond.lift(tree).map { name => val printer = if (isPrivate(tree)) Nil else Seq(definedStr(definitionLabel, Name.backtickWrap(name.decoded))) @@ -89,16 +96,16 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], ) } - val ObjectDef = DefProc("object"){case m: G#ModuleDef => m.name} - val ClassDef = DefProc("class"){ case m: G#ClassDef if !m.mods.isTrait => m.name } - val TraitDef = DefProc("trait"){ case m: G#ClassDef if m.mods.isTrait => m.name } - val DefDef = DefProc("function"){ case m: G#DefDef => m.name } - val TypeDef = DefProc("type"){ case m: G#TypeDef => m.name } + val ObjectDef = DefProc("object") { case m: G#ModuleDef => m.name } + val ClassDef = DefProc("class") { case m: G#ClassDef if !m.mods.isTrait => m.name } + val TraitDef = 
DefProc("trait") { case m: G#ClassDef if m.mods.isTrait => m.name } + val DefDef = DefProc("function") { case m: G#DefDef => m.name } + val TypeDef = DefProc("type") { case m: G#TypeDef => m.name } val PatVarDef = Processor { case (name, code, t: G#ValDef) => Expanded( - //Only wrap rhs in function if it is not a function - //Wrapping functions causes type inference errors. + // Only wrap rhs in function if it is not a function + // Wrapping functions causes type inference errors. code, // Try to leave out all synthetics; we don't actually have proper // synthetic flags right now, because we're dumb-parsing it and not putting @@ -109,7 +116,7 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], ) } - val Import = Processor{ + val Import = Processor { case (name, code, tree: G#Import) => val body = fastparse.parse(code, Parsers.ImportFinder(_)) match { case s: fastparse.Parsed.Success[String] => @@ -118,8 +125,10 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], val Array(keyword, body) = code.split(" ", 2) body } - Expanded(code, Seq( - s""" + Expanded( + code, + Seq( + s""" _root_.ammonite .repl .ReplBridge @@ -127,11 +136,12 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], .Internal .printImport(${fastparse.internal.Util.literalize(body)}) """ - )) + ) + ) } - val Expr = Processor{ - //Expressions are lifted to anon function applications so they will be JITed + val Expr = Processor { + // Expressions are lifted to anon function applications so they will be JITed case (name, code, tree) => val expandedCode = if (markGeneratedSections) @@ -145,13 +155,22 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], } val decls = Seq[(String, String, G#Tree) => Option[Expanded]]( - ObjectDef, ClassDef, TraitDef, DefDef, TypeDef, PatVarDef, Import, Expr + ObjectDef, + ClassDef, + TraitDef, + DefDef, + TypeDef, + PatVarDef, + Import, + Expr ) - def expandStatements(stmts: Seq[String], - wrapperIndex: String, - skipEmpty: Boolean): Res[Expanded] = { - stmts match{ + def expandStatements( + stmts: Seq[String], + wrapperIndex: String, + skipEmpty: Boolean + ): Res[Expanded] = { + stmts match { // In the REPL, we do not process empty inputs at all, to avoid // unnecessarily incrementing the command counter // @@ -166,7 +185,7 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], def complete(code: String, resultIndex: String, postSplit: Seq[String]) = { val reParsed = postSplit.map(p => (parse(p), p)) - val errors = reParsed.collect{case (Left(e), _) => e } + val errors = reParsed.collect { case (Left(e), _) => e } if (errors.length != 0) Res.Failure(errors.mkString(newLine)) else { val allDecls = for { @@ -199,8 +218,8 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], } } - allDecls match{ - case Seq(first, rest@_*) => + allDecls match { + case Seq(first, rest @ _*) => val allDeclsWithComments = Expanded(first.code, first.printer) +: rest Res( allDeclsWithComments.reduceOption { (a, b) => @@ -220,4 +239,3 @@ class DefaultPreprocessor(parse: => String => Either[String, Seq[G#Tree]], } } } - diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/Highlighter.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/Highlighter.scala index 5c434ad52..a2f627209 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/Highlighter.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/Highlighter.scala @@ -1,51 +1,51 @@ package 
ammonite.compiler - import fastparse._, NoWhitespace._ import scalaparse.Scala._ import scalaparse.syntax.Identifiers._ object Highlighter { - object BackTicked{ + object BackTicked { private[this] val regex = "`([^`]+)`".r def unapplySeq(s: String): Option[List[String]] = { regex.unapplySeq(s) } } - - def flattenIndices(boundedIndices: Seq[(Int, fansi.Attrs)], - buffer: Vector[Char]) = { + def flattenIndices(boundedIndices: Seq[(Int, fansi.Attrs)], buffer: Vector[Char]) = { boundedIndices .sliding(2) - .map{case Seq((s, c1), (e, c2)) => + .map { case Seq((s, c1), (e, c2)) => assert(e >= s, s"s: $s e: $e") c1(fansi.Str(SeqCharSequence(buffer.slice(s, e)), errorMode = fansi.ErrorMode.Sanitize)) }.reduce(_ ++ _).render.toVector } - - def defaultHighlight0(parser: P[_] => P[Any], - buffer: Vector[Char], - comment: fansi.Attrs, - `type`: fansi.Attrs, - literal: fansi.Attrs, - keyword: fansi.Attrs, - reset: fansi.Attrs) = { + def defaultHighlight0( + parser: P[_] => P[Any], + buffer: Vector[Char], + comment: fansi.Attrs, + `type`: fansi.Attrs, + literal: fansi.Attrs, + keyword: fansi.Attrs, + reset: fansi.Attrs + ) = { val boundedIndices = defaultHighlightIndices0(parser, buffer, comment, `type`, literal, keyword, reset) flattenIndices(boundedIndices, buffer) } - def defaultHighlightIndices0(parser: P[_] => P[Any], - buffer: Vector[Char], - comment: fansi.Attrs, - `type`: fansi.Attrs, - literal: fansi.Attrs, - keyword: fansi.Attrs, - reset: fansi.Attrs) = Highlighter.highlightIndices( + def defaultHighlightIndices0( + parser: P[_] => P[Any], + buffer: Vector[Char], + comment: fansi.Attrs, + `type`: fansi.Attrs, + literal: fansi.Attrs, + keyword: fansi.Attrs, + reset: fansi.Attrs + ) = Highlighter.highlightIndices( parser, buffer, { @@ -55,69 +55,75 @@ object Highlighter { case "PatLiteral" => literal case "TypeId" => `type` case BackTicked(body) - if parse(body, scalaparse.syntax.Identifiers.AlphabetKeywords(_)).isSuccess => keyword + if parse(body, scalaparse.syntax.Identifiers.AlphabetKeywords(_)).isSuccess => keyword }, reset ) - def highlightIndices[T](parser: P[_] => P[Any], - buffer: Vector[Char], - ruleColors: PartialFunction[String, T], - endColor: T): Seq[(Int, T)] = { + def highlightIndices[T]( + parser: P[_] => P[Any], + buffer: Vector[Char], + ruleColors: PartialFunction[String, T], + endColor: T + ): Seq[(Int, T)] = { val indices = { var indices = collection.mutable.Buffer((0, endColor)) var done = false val input = buffer.mkString val stack = collection.mutable.ArrayBuffer.empty[(T, Int, Int, Boolean)] - val res = parse(input, parser, instrument = new fastparse.internal.Instrument { - def beforeParse(parser: String, index: Int): Unit = { - for(color <- ruleColors.lift(parser)) { - val closeColor = indices.last._2 - val startIndex = indices.length - val newIndex = - index > indices.lastOption.fold(0)(_._1) || - indices.lastOption.map(_._2).contains(endColor) - - if (newIndex) indices += ((index, color)) - stack.append((closeColor, startIndex, index, newIndex)) - } + val res = parse( + input, + parser, + instrument = new fastparse.internal.Instrument { + def beforeParse(parser: String, index: Int): Unit = { + for (color <- ruleColors.lift(parser)) { + val closeColor = indices.last._2 + val startIndex = indices.length + val newIndex = + index > indices.lastOption.fold(0)(_._1) || + indices.lastOption.map(_._2).contains(endColor) + + if (newIndex) indices += ((index, color)) + stack.append((closeColor, startIndex, index, newIndex)) + } - } - def afterParse(parser: String, index: Int, 
success: Boolean): Unit = { + } + def afterParse(parser: String, index: Int, success: Boolean): Unit = { - for(color <- ruleColors.lift(parser)) { - val (closeColor, startIndex, idx, newIndex) = stack.remove(stack.length - 1) + for (color <- ruleColors.lift(parser)) { + val (closeColor, startIndex, idx, newIndex) = stack.remove(stack.length - 1) - def endCheckParser[_: P] = P(WL ~ End) + def endCheckParser[_: P] = P(WL ~ End) + if (newIndex) { + if (success) { + val prev = indices(startIndex - 1)._1 - if (newIndex) { - if (success) { - val prev = indices(startIndex - 1)._1 + if (idx < prev && index <= prev) { + indices.remove(startIndex, indices.length - startIndex) - if (idx < prev && index <= prev) { - indices.remove(startIndex, indices.length - startIndex) - - } - while (idx < indices.last._1 && index <= indices.last._1) { - indices.remove(indices.length - 1) - } - indices += ((index, closeColor)) - if (index == buffer.length) done = true - } else if ( + } + while (idx < indices.last._1 && index <= indices.last._1) { + indices.remove(indices.length - 1) + } + indices += ((index, closeColor)) + if (index == buffer.length) done = true + } else if ( index == buffer.length && !parse(input, endCheckParser(_), startIndex = startIndex).isSuccess && - index > idx) { - done = true - } else { - indices.remove(startIndex, indices.length - startIndex) + index > idx + ) { + done = true + } else { + indices.remove(startIndex, indices.length - startIndex) + } } + } } - } - }) + ) indices } diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/Parsers.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/Parsers.scala index 53621e7ff..d66a9b363 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/Parsers.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/Parsers.scala @@ -16,48 +16,42 @@ object Parsers extends IParser { // For some reason Scala doesn't import this by default private def `_`[_: P] = scalaparse.Scala.Underscore - private def ImportExpr[_: P]: P[ammonite.util.ImportTree] = { - def IdParser = P( (Id | `_` ).! ).map( - s => if (s(0) == '`') s.drop(1).dropRight(1) else s - ) - def Selector = P( IdParser ~ (`=>` ~/ IdParser).? ) - def Selectors = P( "{" ~/ Selector.rep(sep = ","./) ~ "}" ) - def BulkImport = P( `_`).map( - _ => Seq("_" -> None) - ) - def Prefix = P( IdParser.rep(1, sep = ".") ) - def Suffix = P( "." ~/ (BulkImport | Selectors) ) + def IdParser = P((Id | `_`).!).map(s => if (s(0) == '`') s.drop(1).dropRight(1) else s) + def Selector = P(IdParser ~ (`=>` ~/ IdParser).?) + def Selectors = P("{" ~/ Selector.rep(sep = ","./) ~ "}") + def BulkImport = P(`_`).map(_ => Seq("_" -> None)) + def Prefix = P(IdParser.rep(1, sep = ".")) + def Suffix = P("." ~/ (BulkImport | Selectors)) // Manually use `WL0` parser here, instead of relying on WhitespaceApi, as // we do not want the whitespace to be consumed even if the WL0 parser parses // to the end of the input (which is the default behavior for WhitespaceApi) - P( Index ~~ Prefix ~~ (WL0 ~~ Suffix).? ~~ Index).map{ + P(Index ~~ Prefix ~~ (WL0 ~~ Suffix).? ~~ Index).map { case (start, idSeq, selectors, end) => ammonite.util.ImportTree(idSeq, selectors, start, end) } } def ImportSplitter[_: P]: P[Seq[ammonite.util.ImportTree]] = - P( WL ~ `import` ~/ ImportExpr.rep(1, sep = ","./) ) + P(WL ~ `import` ~/ ImportExpr.rep(1, sep = ","./)) def ImportFinder[_: P]: P[String] = P(WL ~ `import` ~/ ImportExpr.! 
~ End) private def PatVarSplitter[_: P] = { def Prefixes = P(Prelude ~ (`var` | `val`)) - def Lhs = P( Prefixes ~/ BindPattern.rep(1, "," ~/ Pass) ~ (`:` ~/ Type).? ) - P( Lhs.! ~ (`=` ~/ WL ~ StatCtx.Expr.!) ~ End ) + def Lhs = P(Prefixes ~/ BindPattern.rep(1, "," ~/ Pass) ~ (`:` ~/ Type).?) + P(Lhs.! ~ (`=` ~/ WL ~ StatCtx.Expr.!) ~ End) } private def patVarSplit(code: String) = { val Parsed.Success((lhs, rhs), _) = parse(code, PatVarSplitter(_)) (lhs, rhs) } - private def Prelude[_: P] = P( (Annot ~ OneNLMax).rep ~ (Mod ~/ Pass).rep ) - - private def TmplStat[_: P] = P( Import | Prelude ~ BlockDef | StatCtx.Expr ) + private def Prelude[_: P] = P((Annot ~ OneNLMax).rep ~ (Mod ~/ Pass).rep) + private def TmplStat[_: P] = P(Import | Prelude ~ BlockDef | StatCtx.Expr) // Do this funny ~~WS thing to make sure we capture the whitespace // together with each statement; otherwise, by default, it gets discarded. @@ -65,24 +59,24 @@ object Parsers extends IParser { // After each statement, there must either be `Semis`, a "}" marking the // end of the block, or the `End` of the input private def StatementBlock[_: P](blockSep: => P0) = - P( Semis.? ~ (Index ~ (!blockSep ~ TmplStat ~~ WS ~~ (Semis | &("}") | End)).!).repX) + P(Semis.? ~ (Index ~ (!blockSep ~ TmplStat ~~ WS ~~ (Semis | &("}") | End)).!).repX) private def Splitter0[_: P] = P(StatementBlock(Fail)) - private def HighlightSplitter[_: P] = P( ("{" ~ Splitter0 ~ "}" | Splitter0) ~ End ) + private def HighlightSplitter[_: P] = P(("{" ~ Splitter0 ~ "}" | Splitter0) ~ End) def Splitter[_: P] = P(("{" ~~ WL.! ~~ Splitter0 ~ "}" | WL.! ~~ Splitter0) ~ End) - private def ObjParser[_: P] = P( ObjDef ) + private def ObjParser[_: P] = P(ObjDef) /** * Attempts to break a code blob into multiple statements. Returns `None` if * it thinks the code blob is "incomplete" and requires more input */ def split( - code: String, - ignoreIncomplete: Boolean, - fileName: String + code: String, + ignoreIncomplete: Boolean, + fileName: String ): Option[Either[String, Seq[String]]] = if (ignoreIncomplete) { // We use `instrument` to detect when the parser has reached the end of the @@ -96,11 +90,11 @@ object Parsers extends IParser { } } - parse(code, Splitter(_), instrument = instrument) match{ + parse(code, Splitter(_), instrument = instrument) match { case Parsed.Failure(_, index, extra) if furthest == code.length => None case f @ Parsed.Failure(_, _, _) => Some(Left( - formatFastparseError(fileName, code, f) - )) + formatFastparseError(fileName, code, f) + )) case Parsed.Success(value, index) => { val (str, seq) = value if (seq.isEmpty) { @@ -111,10 +105,10 @@ object Parsers extends IParser { } } } else - parse(code, Splitter(_)) match{ + parse(code, Splitter(_)) match { case f @ Parsed.Failure(_, _, _) => Some(Left( - formatFastparseError(fileName, code, f) - )) + formatFastparseError(fileName, code, f) + )) case Parsed.Success(value, index) => { val (str, seq) = value if (seq.isEmpty) { @@ -131,44 +125,46 @@ object Parsers extends IParser { .fold((_, _, _) => false, (_, _) => true) } - private def Separator[_: P] = P( WL ~ "@" ~~ CharIn(" \n\r").rep(1) ) - private def CompilationUnit[_: P] = P( WL.! ~ StatementBlock(Separator) ~ WL ) - private def ScriptSplitter[_: P] = P( CompilationUnit.repX(1, Separator) ~ End) + private def Separator[_: P] = P(WL ~ "@" ~~ CharIn(" \n\r").rep(1)) + private def CompilationUnit[_: P] = P(WL.! 
~ StatementBlock(Separator) ~ WL) + private def ScriptSplitter[_: P] = P(CompilationUnit.repX(1, Separator) ~ End) def splitScript(code: String): Parsed[Seq[(String, Seq[(Int, String)])]] = parse(code, ScriptSplitter(_)) private def ScriptSplitterWithStart[_: P] = - P( Start ~ (Index ~ CompilationUnit).repX(1, Separator) ~ End) + P(Start ~ (Index ~ CompilationUnit).repX(1, Separator) ~ End) def splitScriptWithStart(code: String): Parsed[Seq[(Int, (String, Seq[(Int, String)]))]] = parse(code, ScriptSplitterWithStart(_)) def stringWrap(s: String): String = "\"" + pprint.Util.literalize(s) + "\"" def stringSymWrap(s: String): String = { - def idToEnd[_: P] = P( scalaparse.syntax.Identifiers.Id ~ End ) + def idToEnd[_: P] = P(scalaparse.syntax.Identifiers.Id ~ End) if (s == "") "'" - else parse(s, idToEnd(_)) match{ - case Parsed.Success(v, _) => "'" + s + else parse(s, idToEnd(_)) match { + case Parsed.Success(v, _) => "'" + s case f: Parsed.Failure => stringWrap(s) } } def parseImportHooksWithIndices( - source: CodeSource, - stmts: Seq[(Int, String)] - ): (Seq[String], Seq[ImportTree]) = synchronized{ + source: CodeSource, + stmts: Seq[(Int, String)] + ): (Seq[String], Seq[ImportTree]) = synchronized { val hookedStmts = mutable.Buffer.empty[String] val importTrees = mutable.Buffer.empty[ImportTree] - for((startIdx, stmt) <- stmts) { + for ((startIdx, stmt) <- stmts) { // Call `fastparse.ParserInput.fromString` explicitly, to avoid generating a // lambda in the class body and making the we-do-not-load-fastparse-on-cached-scripts // test fail - parse(fastparse.ParserInput.fromString(stmt), ImportSplitter(_)) match{ + parse(fastparse.ParserInput.fromString(stmt), ImportSplitter(_)) match { case f: Parsed.Failure => hookedStmts.append(stmt) case Parsed.Success(parsedTrees, _) => var currentStmt = stmt - for(importTree <- parsedTrees){ + for (importTree <- parsedTrees) { if (importTree.prefix(0)(0) == '$') { val length = importTree.end - importTree.start currentStmt = currentStmt.patch( - importTree.start, (importTree.prefix(0) + ".$").padTo(length, ' '), length + importTree.start, + (importTree.prefix(0) + ".$").padTo(length, ' '), + length ) val importTree0 = importTree.copy( start = startIdx + importTree.start, @@ -186,50 +182,48 @@ object Parsers extends IParser { val lineColIndex = f.extra.input.prettyIndex(f.index) val expected = f.trace().failure.label - val locationString = { - val (first, last) = rawCode.splitAt(f.index) - val lastSnippet = last.split(newLine).headOption.getOrElse("") - val firstSnippet = first.reverse - .split(newLine.reverse) - .lift(0).getOrElse("").reverse - firstSnippet + lastSnippet + newLine + (" " * firstSnippet.length) + "^" - } + val locationString = { + val (first, last) = rawCode.splitAt(f.index) + val lastSnippet = last.split(newLine).headOption.getOrElse("") + val firstSnippet = first.reverse + .split(newLine.reverse) + .lift(0).getOrElse("").reverse + firstSnippet + lastSnippet + newLine + (" " * firstSnippet.length) + "^" + } s"$fileName:$lineColIndex expected $expected$newLine$locationString" } - /** - * Splits up a script file into its constituent blocks, each of which - * is a tuple of (leading-whitespace, statements). Leading whitespace - * is returned separately so we can later manipulate the statements e.g. - * by adding `val res2 = ` without the whitespace getting in the way - */ + * Splits up a script file into its constituent blocks, each of which + * is a tuple of (leading-whitespace, statements). 
Leading whitespace + * is returned separately so we can later manipulate the statements e.g. + * by adding `val res2 = ` without the whitespace getting in the way + */ def splitScript( - rawCode: String, - fileName: String + rawCode: String, + fileName: String ): Either[String, IndexedSeq[(String, Seq[String])]] = { parse(rawCode, ScriptSplitter(_)) match { case f: Parsed.Failure => Left(formatFastparseError(fileName, rawCode, f)) case s: Parsed.Success[Seq[(String, Seq[(Int, String)])]] => - var offset = 0 val blocks = mutable.ArrayBuffer[(String, Seq[String])]() // comment holds comments or empty lines above the code which is not caught along with code - for( (comment, codeWithStartIdx) <- s.value){ + for ((comment, codeWithStartIdx) <- s.value) { val code = codeWithStartIdx.map(_._2) - //ncomment has required number of newLines appended based on OS and offset - //since fastparse has hardcoded `\n`s, while parsing strings with `\r\n`s it - //gives out one extra `\r` after '@' i.e. block change - //which needs to be removed to get correct line number (It adds up one extra line) - //thats why the `comment.substring(1)` thing is necessary + // ncomment has required number of newLines appended based on OS and offset + // since fastparse has hardcoded `\n`s, while parsing strings with `\r\n`s it + // gives out one extra `\r` after '@' i.e. block change + // which needs to be removed to get correct line number (It adds up one extra line) + // thats why the `comment.substring(1)` thing is necessary val ncomment = - if(windowsPlatform && blocks.nonEmpty && !comment.isEmpty){ + if (windowsPlatform && blocks.nonEmpty && !comment.isEmpty) { comment.substring(1) + newLine * offset - }else{ + } else { comment + newLine * offset } @@ -244,8 +238,8 @@ object Parsers extends IParser { } def splitScriptWithStart( - rawCode: String, - fileName: String + rawCode: String, + fileName: String ): Either[Parsed.Failure, IndexedSeq[(Int, String, Seq[(Int, String)])]] = { Parsers.splitScriptWithStart(rawCode) match { case f: Parsed.Failure => @@ -261,8 +255,8 @@ object Parsers extends IParser { } def scriptBlocksWithStartIndices( - rawCode: String, - fileName: String + rawCode: String, + fileName: String ): Either[IParser.ScriptSplittingError, Seq[IParser.ScriptBlock]] = { splitScriptWithStart(rawCode, fileName) match { case Left(f) => @@ -280,26 +274,46 @@ object Parsers extends IParser { } } - def defaultHighlight(buffer: Vector[Char], - comment: fansi.Attrs, - `type`: fansi.Attrs, - literal: fansi.Attrs, - keyword: fansi.Attrs, - notImplemented: fansi.Attrs, - reset: fansi.Attrs) = { - Highlighter.defaultHighlight0(HighlightSplitter(_), buffer, comment, `type`, literal, keyword, reset) + def defaultHighlight( + buffer: Vector[Char], + comment: fansi.Attrs, + `type`: fansi.Attrs, + literal: fansi.Attrs, + keyword: fansi.Attrs, + notImplemented: fansi.Attrs, + reset: fansi.Attrs + ) = { + Highlighter.defaultHighlight0( + HighlightSplitter(_), + buffer, + comment, + `type`, + literal, + keyword, + reset + ) } - def defaultHighlightIndices(buffer: Vector[Char], - comment: fansi.Attrs, - `type`: fansi.Attrs, - literal: fansi.Attrs, - keyword: fansi.Attrs, - reset: fansi.Attrs) = Highlighter.defaultHighlightIndices0( - HighlightSplitter(_), buffer, comment, `type`, literal, keyword, reset + def defaultHighlightIndices( + buffer: Vector[Char], + comment: fansi.Attrs, + `type`: fansi.Attrs, + literal: fansi.Attrs, + keyword: fansi.Attrs, + reset: fansi.Attrs + ) = Highlighter.defaultHighlightIndices0( + 
HighlightSplitter(_), + buffer, + comment, + `type`, + literal, + keyword, + reset ) - def highlightIndices[T](buffer: Vector[Char], - ruleColors: PartialFunction[String, T], - endColor: T): Seq[(Int, T)] = + def highlightIndices[T]( + buffer: Vector[Char], + ruleColors: PartialFunction[String, T], + endColor: T + ): Seq[(Int, T)] = Highlighter.highlightIndices(Parsers.HighlightSplitter(_), buffer, ruleColors, endColor) } diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/Pressy.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/Pressy.scala index c2e479735..c8c0b6e4f 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/Pressy.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/Pressy.scala @@ -15,22 +15,26 @@ import ammonite.util.Util.newLine import scala.tools.nsc.interactive.{Global => InteractiveGlobal} import scala.tools.nsc.classpath.AggregateClassPath import scala.tools.nsc.reporters.Reporter + /** * Nice wrapper for the presentation compiler. */ -trait Pressy{ +trait Pressy { + /** - * Ask for autocompletion at a particular spot in the code, returning - * possible things that can be completed at that location. May try various - * different completions depending on where the `index` is placed, but - * the outside caller probably doesn't care. - * - * The returned index gives the position from which the possible replacements - * should be inserted. - */ - def complete(snippetIndex: Int, - previousImports: String, - snippet: String): (Int, Seq[String], Seq[String]) + * Ask for autocompletion at a particular spot in the code, returning + * possible things that can be completed at that location. May try various + * different completions depending on where the `index` is placed, but + * the outside caller probably doesn't care. + * + * The returned index gives the position from which the possible replacements + * should be inserted. 
+ */ + def complete( + snippetIndex: Int, + previousImports: String, + snippet: String + ): (Int, Seq[String], Seq[String]) def compiler: nsc.interactive.Global def shutdownPressy(): Unit } @@ -40,11 +44,13 @@ object Pressy { * Encapsulates all the logic around a single instance of * `nsc.interactive.Global` and other data specific to a single completion */ - class Run(val pressy: nsc.interactive.Global, - currentFile: BatchSourceFile, - dependencyCompleteOpt: Option[String => (Int, Seq[String])], - allCode: String, - index: Int){ + class Run( + val pressy: nsc.interactive.Global, + currentFile: BatchSourceFile, + dependencyCompleteOpt: Option[String => (Int, Seq[String])], + allCode: String, + index: Int + ) { val blacklistedPackages = Set("shaded") @@ -113,6 +119,7 @@ object Pressy { val r = new Response[pressy.Tree] pressy.askTypeAt(new OffsetPosition(currentFile, index), r) val tree = r.get.fold(x => x, e => throw e) + /** * Search for terms to autocomplete not just from the local scope, * but from any packages and package objects accessible from the @@ -146,15 +153,15 @@ object Pressy { } def handleTypeCompletion(position: Int, decoded: String, offset: Int) = { - val r = ask(position, pressy.askTypeCompletion) + val r = ask(position, pressy.askTypeCompletion) val prefix = if (decoded == "") "" else decoded (position + offset, handleCompletion(r, prefix)) } - def handleCompletion(r: List[pressy.Member], prefix: String) = pressy.ask{ () => + def handleCompletion(r: List[pressy.Member], prefix: String) = pressy.ask { () => r.filter(_.sym.name.decoded.startsWith(prefix)) .filter(m => !blacklisted(m.sym)) - .map{ x => + .map { x => ( memberToString(x), if (x.sym.name.decoded != prefix) None @@ -165,44 +172,43 @@ object Pressy { def prefixed: (Int, Seq[(String, Option[String])]) = tree match { case t @ pressy.Select(qualifier, name) => - val dotOffset = if (qualifier.pos.point == t.pos.point) 0 else 1 - //In scala 2.10.x if we call pos.end on a scala.reflect.internal.util.Position - //that is not a range, a java.lang.UnsupportedOperationException is thrown. - //We check here if Position is a range before calling .end on it. - //This is not needed for scala 2.11.x. + // In scala 2.10.x if we call pos.end on a scala.reflect.internal.util.Position + // that is not a range, a java.lang.UnsupportedOperationException is thrown. + // We check here if Position is a range before calling .end on it. + // This is not needed for scala 2.11.x. if (qualifier.pos.isRange) { handleTypeCompletion(qualifier.pos.end, name.decoded, dotOffset) } else { - //not prefixed + // not prefixed (0, Seq.empty) } - case t @ pressy.Import(expr, selectors) => + case t @ pressy.Import(expr, selectors) => // If the selectors haven't been defined yet... if (selectors.head.name.toString == "") { if (expr.tpe.toString == "") { - // If the expr is badly typed, try to scope complete it - if (expr.isInstanceOf[pressy.Ident]) { - val exprName = expr.asInstanceOf[pressy.Ident].name.decoded - val pos = - // Without the first case, things like `import ` are - // returned a wrong position. 
- if (exprName == "") expr.pos.point - 1 - else expr.pos.point - pos -> handleCompletion( - ask(expr.pos.point, pressy.askScopeCompletion), - // if it doesn't have a name at all, accept anything - if (exprName == "") "" else exprName - ) - } else (expr.pos.point, Seq.empty) + // If the expr is badly typed, try to scope complete it + if (expr.isInstanceOf[pressy.Ident]) { + val exprName = expr.asInstanceOf[pressy.Ident].name.decoded + val pos = + // Without the first case, things like `import ` are + // returned a wrong position. + if (exprName == "") expr.pos.point - 1 + else expr.pos.point + pos -> handleCompletion( + ask(expr.pos.point, pressy.askScopeCompletion), + // if it doesn't have a name at all, accept anything + if (exprName == "") "" else exprName + ) + } else (expr.pos.point, Seq.empty) } else { // If the expr is well typed, type complete // the next thing handleTypeCompletion(expr.pos.end, "", 1) } - }else { + } else { val isImportIvy = expr.isInstanceOf[pressy.Ident] && expr.asInstanceOf[pressy.Ident].name.decoded == "$ivy" val selector = selectors @@ -247,12 +253,12 @@ object Pressy { index -> pressy.ask(() => comps.filter(m => !blacklisted(m.sym)) - .map { s => (memberToString(s), None) } + .map { s => (memberToString(s), None) } ) } def ask(index: Int, query: (Position, Response[List[pressy.Member]]) => Unit) = { val position = new OffsetPosition(currentFile, index) - //if a match can't be found awaitResponse throws an Exception. + // if a match can't be found awaitResponse throws an Exception. val result = Try(Compiler.awaitResponse[List[pressy.Member]](query(position, _))) result match { case Success(scopes) => scopes.filter(_.accessible) @@ -261,13 +267,15 @@ object Pressy { } } - def apply(classpath: Seq[java.net.URL], - dynamicClasspath: VirtualDirectory, - evalClassloader: => ClassLoader, - settings: Settings, - dependencyCompleteOpt: => Option[String => (Int, Seq[String])], - classPathWhitelist: Set[Seq[String]], - initialClassPath: Seq[java.net.URL]): Pressy = new Pressy { + def apply( + classpath: Seq[java.net.URL], + dynamicClasspath: VirtualDirectory, + evalClassloader: => ClassLoader, + settings: Settings, + dependencyCompleteOpt: => Option[String => (Int, Seq[String])], + classPathWhitelist: Set[Seq[String]], + initialClassPath: Seq[java.net.URL] + ): Pressy = new Pressy { @volatile var cachedPressy: nsc.interactive.Global = null @@ -275,10 +283,12 @@ object Pressy { if (cachedPressy == null) cachedPressy = initPressy cachedPressy } - def initInteractiveGlobal(settings: Settings, - reporter: Reporter, - jcp: AggregateClassPath, - evalClassloader: ClassLoader): InteractiveGlobal = { + def initInteractiveGlobal( + settings: Settings, + reporter: Reporter, + jcp: AggregateClassPath, + evalClassloader: ClassLoader + ): InteractiveGlobal = { new nsc.interactive.Global(settings, reporter) { g => // Actually jcp, avoiding a path-dependent type issue in 2.10 here override def classPath = jcp @@ -295,7 +305,7 @@ object Pressy { def initPressy = { val (dirDeps, jarDeps) = classpath.partition { u => u.getProtocol == "file" && - java.nio.file.Files.isDirectory(java.nio.file.Paths.get(u.toURI)) + java.nio.file.Files.isDirectory(java.nio.file.Paths.get(u.toURI)) } val jcp = Compiler.initGlobalClasspath( dirDeps, @@ -318,7 +328,8 @@ object Pressy { val pressy = compiler val currentFile = new BatchSourceFile( Compiler.makeFile(allCode.getBytes, name = "Current.sc"), - allCode) + allCode + ) val r = new Response[Unit] pressy.askReload(List(currentFile), r) diff --git 
a/amm/compiler/src/main/scala-2/ammonite/compiler/internal/CustomURLZipArchive.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/internal/CustomURLZipArchive.scala index 0459ad8f3..3c52e1a01 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/internal/CustomURLZipArchive.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/internal/CustomURLZipArchive.scala @@ -16,30 +16,29 @@ final class CustomURLZipArchive(val url: java.net.URL) extends AbstractFile with def file: java.io.File = null - private def dirPath(path: String) = path.split('/').toSeq.filter(_.nonEmpty) - private def dirName(path: String) = splitPath(path, front = true) + private def dirPath(path: String) = path.split('/').toSeq.filter(_.nonEmpty) + private def dirName(path: String) = splitPath(path, front = true) private def baseName(path: String) = splitPath(path, front = false) private def splitPath(path0: String, front: Boolean): String = { val isDir = path0.charAt(path0.length - 1) == '/' - val path = if (isDir) path0.substring(0, path0.length - 1) else path0 - val idx = path.lastIndexOf('/') + val path = if (isDir) path0.substring(0, path0.length - 1) else path0 + val idx = path.lastIndexOf('/') if (idx < 0) if (front) "/" else path - else - if (front) path.substring(0, idx + 1) + else if (front) path.substring(0, idx + 1) else path.substring(idx + 1) } override def underlyingSource = Some(this) def isDirectory = true def lookupName(name: String, directory: Boolean) = unsupported() def lookupNameUnchecked(name: String, directory: Boolean) = unsupported() - def create() = unsupported() - def delete() = unsupported() - def output = unsupported() + def create() = unsupported() + def delete() = unsupported() + def output = unsupported() def container = unsupported() - def absolute = unsupported() + def absolute = unsupported() abstract class Entry(path: String) extends VirtualFile(baseName(path), path) { // have to keep this name for compat with sbt's compiler-interface @@ -85,35 +84,35 @@ final class CustomURLZipArchive(val url: java.net.URL) extends AbstractFile with } private def ensureDir( - dirs: collection.mutable.Map[Seq[String], DirEntry], - path: String, - zipEntry: ZipEntry + dirs: collection.mutable.Map[Seq[String], DirEntry], + path: String, + zipEntry: ZipEntry ): DirEntry = dirs.get(dirPath(path)) match { case Some(v) => v case None => val parent = ensureDir(dirs, dirName(path), null) - val dir = new DirEntry(path) + val dir = new DirEntry(path) parent.entries(baseName(path)) = dir dirs(dirPath(path)) = dir dir } private def getDir( - dirs: collection.mutable.Map[Seq[String], DirEntry], - entry: ZipEntry + dirs: collection.mutable.Map[Seq[String], DirEntry], + entry: ZipEntry ): DirEntry = { if (entry.isDirectory) ensureDir(dirs, entry.getName, entry) else ensureDir(dirs, dirName(entry.getName), null) } - private lazy val dirs: Map[Seq[String], DirEntry] = { - val root = new DirEntry("/") - val dirs = collection.mutable.HashMap[Seq[String], DirEntry](Nil -> root) - val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input))) + val root = new DirEntry("/") + val dirs = collection.mutable.HashMap[Seq[String], DirEntry](Nil -> root) + val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input))) - @annotation.tailrec def loop() { + @annotation.tailrec + def loop() { val zipEntry = in.getNextEntry if (zipEntry != null) { @@ -125,7 +124,7 @@ final class CustomURLZipArchive(val url: java.net.URL) extends AbstractFile with else { val content = { val baos = new 
java.io.ByteArrayOutputStream - val b = Array.ofDim[Byte](16*1024) + val b = Array.ofDim[Byte](16 * 1024) def loop() { val read = in.read(b, 0, b.length) @@ -154,8 +153,8 @@ final class CustomURLZipArchive(val url: java.net.URL) extends AbstractFile with def iterator: Iterator[AbstractFile] = dirs(Nil).iterator - def name = url.getFile - def path = url.getPath + def name = url.getFile + def path = url.getPath def input = url.openStream() def lastModified = try url.openConnection().getLastModified @@ -164,11 +163,10 @@ final class CustomURLZipArchive(val url: java.net.URL) extends AbstractFile with override def canEqual(other: Any) = other.isInstanceOf[CustomURLZipArchive] override def hashCode() = url.hashCode override def equals(that: Any) = that match { - case x: CustomURLZipArchive=> url == x.url - case _ => false + case x: CustomURLZipArchive => url == x.url + case _ => false } - def allDirsByDottedName: collection.Map[String, DirEntry] = { dirs.map { case (k, v) => @@ -178,7 +176,7 @@ final class CustomURLZipArchive(val url: java.net.URL) extends AbstractFile with } -object CustomURLZipArchive{ +object CustomURLZipArchive { def closeZipFile = false private val is2_12_11 = scala.util.Properties.versionNumberString == "2.12.11" } diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/tools/HighlightJava.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/tools/HighlightJava.scala index c9b372f56..e4526f858 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/tools/HighlightJava.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/tools/HighlightJava.scala @@ -8,7 +8,7 @@ import sourcecode.Compat.Context import scala.collection.mutable -object HighlightJava{ +object HighlightJava { def highlightJavaCode(sourceCode: String, colors: CodeColors) = { import collection.JavaConverters._ @@ -20,29 +20,29 @@ object HighlightJava{ val lineCounts = Predef.augmentString(sourceCode).lines.map(_.length).toArray def positionToOffset(p: com.github.javaparser.Position) = { - lineCounts.iterator.take(p.line - 1).sum + (p.line-1) + (p.column - 1) + lineCounts.iterator.take(p.line - 1).sum + (p.line - 1) + (p.column - 1) } if (!parsed.isPresent) fansi.Str(sourceCode) else { val indices = mutable.Buffer[(Int, fansi.Attrs)]((0, fansi.Attr.Reset)) - for(token <- parsed.get.asScala){ + for (token <- parsed.get.asScala) { import GeneratedJavaParserConstants._ val colorOpt = - token.getKind match{ + token.getKind match { case INTEGER_LITERAL | LONG_LITERAL | FLOATING_POINT_LITERAL | - STRING_LITERAL | TRUE | FALSE | NULL => Some(colors.literal) + STRING_LITERAL | TRUE | FALSE | NULL => Some(colors.literal) // https://en.wikipedia.org/wiki/List_of_Java_keywords case ABSTRACT | ASSERT | BOOLEAN | BREAK | BYTE | CASE | - CATCH | CHAR | CLASS | CONST | CONTINUE | 49 /*_DEFAULT*/ | - DO | DOUBLE | ELSE | ENUM | EXTENDS | FINAL | FINALLY | - FLOAT | FOR | GOTO | IF | IMPLEMENTS | IMPORT | INSTANCEOF | - INT | INTERFACE | LONG | NATIVE | NEW | PACKAGE | PRIVATE | - PROTECTED | PUBLIC | RETURN | SHORT | STATIC | STRICTFP | - SUPER | SWITCH | SYNCHRONIZED | THIS | THROW | THROWS | - TRANSIENT | TRY | VOID | VOLATILE | WHILE => Some(colors.keyword) + CATCH | CHAR | CLASS | CONST | CONTINUE | 49 /*_DEFAULT*/ | + DO | DOUBLE | ELSE | ENUM | EXTENDS | FINAL | FINALLY | + FLOAT | FOR | GOTO | IF | IMPLEMENTS | IMPORT | INSTANCEOF | + INT | INTERFACE | LONG | NATIVE | NEW | PACKAGE | PRIVATE | + PROTECTED | PUBLIC | RETURN | SHORT | STATIC | STRICTFP | + SUPER | SWITCH | SYNCHRONIZED | THIS | THROW | 
THROWS | + TRANSIENT | TRY | VOID | VOLATILE | WHILE => Some(colors.keyword) case SINGLE_LINE_COMMENT | MULTI_LINE_COMMENT | JAVA_DOC_COMMENT => Some(colors.comment) @@ -55,7 +55,7 @@ object HighlightJava{ case _ => None } - for(color <- colorOpt){ + for (color <- colorOpt) { indices.append(( positionToOffset(token.getRange.begin), @@ -75,4 +75,4 @@ object HighlightJava{ fansi.Str(SeqCharSequence(flattenIndices(indices.toSeq, sourceCode.toVector))) } } -} \ No newline at end of file +} diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/tools/SourceRuntime.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/tools/SourceRuntime.scala index 400718974..3e6f31bc6 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/tools/SourceRuntime.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/tools/SourceRuntime.scala @@ -2,7 +2,6 @@ package ammonite.compiler.tools import javassist.{ByteArrayClassPath, CtClass, CtMethod} - import ammonite.compiler.Highlighter import ammonite.runtime.tools.browse.Strings import ammonite.util.CodeColors @@ -10,33 +9,35 @@ import ammonite.util.Util.{Location, newLine} import scala.collection.mutable import scala.language.experimental.macros -object SourceRuntime{ +object SourceRuntime { def defaultPPrinter = pprint.PPrinter.Color.copy(defaultHeight = Int.MaxValue) - def browseSourceCommand(targetLine: Int) = Seq("less", "+" + targetLine,"-RMN") + def browseSourceCommand(targetLine: Int) = Seq("less", "+" + targetLine, "-RMN") /** - * Pull the height from the pretty-printer as a heuristic to shift the - * desired line towards the middle of the screen. Typically, the - * pretty-printer's default height is about half the height of the window, - * so this centers the target line vertically. There is some random - * variation due to the way we're getting line numbers from bytecode, so - * hopefully centering it will help ensure the *actual* desired line is - * visible even if the line number we're aiming for is inaccurate - */ + * Pull the height from the pretty-printer as a heuristic to shift the + * desired line towards the middle of the screen. Typically, the + * pretty-printer's default height is about half the height of the window, + * so this centers the target line vertically. There is some random + * variation due to the way we're getting line numbers from bytecode, so + * hopefully centering it will help ensure the *actual* desired line is + * visible even if the line number we're aiming for is inaccurate + */ def getOffset(p: pprint.PPrinter) = p.defaultHeight /** - * Note: `value` must be a concrete, instantiated value with a concrete class, - * and cannot be an interface or abstract class. We make use of line numbers - * from the bytecode to decide which source to show, and those only exist - * for concrete method implementations - */ - def browseObject(value: Any, - pprinter: pprint.PPrinter, - colors: CodeColors, - command: Int => Strings) = { + * Note: `value` must be a concrete, instantiated value with a concrete class, + * and cannot be an interface or abstract class. 
We make use of line numbers + * from the bytecode to decide which source to show, and those only exist + * for concrete method implementations + */ + def browseObject( + value: Any, + pprinter: pprint.PPrinter, + colors: CodeColors, + command: Int => Strings + ) = { browseSource( loadObjectInfo(value), pprinter.defaultHeight, @@ -58,28 +59,32 @@ object SourceRuntime{ .filter(_ != -1) try Right(firstLines.min) - catch{ case e: UnsupportedOperationException => - Left("Unable to find line number of class " + value.getClass) + catch { + case e: UnsupportedOperationException => + Left("Unable to find line number of class " + value.getClass) } } ) } + /** - * Note: `value` must be a concrete, instantiated value with a concrete class, - * and cannot be an interface or abstract class. We make use of line numbers - * from the bytecode to decide which source to show, and those only exist - * for concrete method implementations - */ - def browseObjectMember(symbolOwnerCls: Class[_], - value: Option[Any], - memberName: String, - pprinter: pprint.PPrinter, - colors: CodeColors, - command: Int => Strings, - returnType: Class[_], - argTypes: Class[_]*) = { + * Note: `value` must be a concrete, instantiated value with a concrete class, + * and cannot be an interface or abstract class. We make use of line numbers + * from the bytecode to decide which source to show, and those only exist + * for concrete method implementations + */ + def browseObjectMember( + symbolOwnerCls: Class[_], + value: Option[Any], + memberName: String, + pprinter: pprint.PPrinter, + colors: CodeColors, + command: Int => Strings, + returnType: Class[_], + argTypes: Class[_]* + ) = { browseSource( - loadObjectMemberInfo(symbolOwnerCls, value, memberName, returnType, argTypes:_*), + loadObjectMemberInfo(symbolOwnerCls, value, memberName, returnType, argTypes: _*), pprinter.defaultHeight, colors, command @@ -89,9 +94,9 @@ object SourceRuntime{ def getDesc(argTypes: Seq[Class[_]], returnType: Class[_]): String = { // https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.3 def unparse(t: Class[_]): String = { - t match{ + t match { case t if t.isPrimitive => - t.toString match{ + t.toString match { case "void" => "V" case "boolean" => "Z" case "byte" => "B" @@ -111,45 +116,48 @@ object SourceRuntime{ } /** - * A hacky way to try and find a "good" source location for a function, - * about as good as we can probably get without a huge amount more effort: - * - * - We rely on the bytecode line numbers to locate methods; unfortunately, - * this only works for concrete, non-abstract methods! But it's the best - * we're going to get short of parsing all the source code ourselves - * - * - We look at the class that's the "owner" of the Scala symbol at compile - * time. This is based on the static type of the value; this *may* be an - * abstract method. If it's concrete, we can use it's bytecode line numbers - * to find it and we're done - * - * - If it's abstract, we then look at the class that's the java.reflect - * DeclaringClass of the value's method, at runtime. This may still not - * find the actual location (if the method comes from a trait, it'll - * give us the class implementing the trait, rather than the trait itself) - * but it gives us another chance at finding the concrete implementation. - * - * Again, this means it is important there is a concrete `value` that has - * the method we're looking for, since we're relying on the bytecode line - * numbers to find the method, which only exist in concrete methods. 
- */ - def loadObjectMemberInfo(symbolOwnerCls: Class[_], - value: Option[Any], - memberName: String, - returnType: Class[_], - argTypes: Class[_]*) = { + * A hacky way to try and find a "good" source location for a function, + * about as good as we can probably get without a huge amount more effort: + * + * - We rely on the bytecode line numbers to locate methods; unfortunately, + * this only works for concrete, non-abstract methods! But it's the best + * we're going to get short of parsing all the source code ourselves + * + * - We look at the class that's the "owner" of the Scala symbol at compile + * time. This is based on the static type of the value; this *may* be an + * abstract method. If it's concrete, we can use it's bytecode line numbers + * to find it and we're done + * + * - If it's abstract, we then look at the class that's the java.reflect + * DeclaringClass of the value's method, at runtime. This may still not + * find the actual location (if the method comes from a trait, it'll + * give us the class implementing the trait, rather than the trait itself) + * but it gives us another chance at finding the concrete implementation. + * + * Again, this means it is important there is a concrete `value` that has + * the method we're looking for, since we're relying on the bytecode line + * numbers to find the method, which only exist in concrete methods. + */ + def loadObjectMemberInfo( + symbolOwnerCls: Class[_], + value: Option[Any], + memberName: String, + returnType: Class[_], + argTypes: Class[_]* + ) = { val desc = getDesc(argTypes, returnType) - (loadSourceFrom(symbolOwnerCls, memberName, desc), value) match{ + (loadSourceFrom(symbolOwnerCls, memberName, desc), value) match { case (Right(loc), _) if loc.lineNum != -1 => Right(loc) case (Left(e1), None) => Left(e1) case (Left(e1), Some(v)) => - try{ - val concreteCls = v.getClass.getMethod(memberName, argTypes:_*).getDeclaringClass + try { + val concreteCls = v.getClass.getMethod(memberName, argTypes: _*).getDeclaringClass loadSourceFrom(concreteCls, memberName, desc) - }catch{case e: NoSuchMethodException => - Left(s"$e1${newLine}Unable to find method${value.getClass.getName}#$memberName") + } catch { + case e: NoSuchMethodException => + Left(s"$e1${newLine}Unable to find method${value.getClass.getName}#$memberName") } } } @@ -159,12 +167,12 @@ object SourceRuntime{ x => { val lineNum = try Right(x.getMethod(memberName, desc).getMethodInfo.getLineNumber(0)) - catch{case e: javassist.NotFoundException => Left(e.getMessage)} + catch { case e: javassist.NotFoundException => Left(e.getMessage) } - lineNum match{ + lineNum match { case Left(e) => Left(e) case Right(n) if n != -1 => Right(n) - case _ => Left("Cannot find line number of method " + cls.getName + "#"+ memberName) + case _ => Left("Cannot find line number of method " + cls.getName + "#" + memberName) } } ) @@ -177,40 +185,48 @@ object SourceRuntime{ pool.get(runtimeCls.getName) } - def loadSource(runtimeCls: Class[_], - getLineNumber: CtClass => Either[String, Int]): Either[String, Location] = { + def loadSource( + runtimeCls: Class[_], + getLineNumber: CtClass => Either[String, Int] + ): Either[String, Location] = { val chunks = runtimeCls.getName.split('.') val (pkg, clsName) = (chunks.init, chunks.last) - for{ - bytecode <- try{ - Right(os.read.bytes(os.resource / pkg / (clsName + ".class"))).right - }catch{ case e: Throwable => - Left("Unable to find bytecode for class " + runtimeCls.getName).right - } + for { + bytecode <- + try { + 
Right(os.read.bytes(os.resource / pkg / (clsName + ".class"))).right + } catch { + case e: Throwable => + Left("Unable to find bytecode for class " + runtimeCls.getName).right + } // Not sure why `ctCls =` doesn't work, but for some reason the // for-comprehension desugaring totally screws it up ctCls <- Right(loadCtClsMetadata(runtimeCls, bytecode)).right lineNumber <- getLineNumber(ctCls).right srcFile <- Right(ctCls.getClassFile.getSourceFile).right - sourceCode <- try{ - Right(os.read(os.resource/ pkg / srcFile)).right - }catch{case e: Throwable => - Left("Unable to find sourcecode for class " + runtimeCls.getName).right - } + sourceCode <- + try { + Right(os.read(os.resource / pkg / srcFile)).right + } catch { + case e: Throwable => + Left("Unable to find sourcecode for class " + runtimeCls.getName).right + } } yield Location(srcFile, lineNumber, sourceCode) } - def browseSource(loaded: Either[String, Location], - verticalOffset: Int, - colors: CodeColors, - command: Int => Strings) = { + def browseSource( + loaded: Either[String, Location], + verticalOffset: Int, + colors: CodeColors, + command: Int => Strings + ) = { - loaded match{ + loaded match { case Right(loc) => val colored = - if (loc.fileName.endsWith(".scala")){ + if (loc.fileName.endsWith(".scala")) { fansi.Str(SeqCharSequence( Highlighter.defaultHighlight0( scalaparse.Scala.CompilationUnit(_), @@ -222,9 +238,9 @@ object SourceRuntime{ fansi.Attr.Reset ) )) - }else if (loc.fileName.endsWith(".java")){ + } else if (loc.fileName.endsWith(".java")) { HighlightJava.highlightJavaCode(loc.fileContent, colors) - }else { + } else { fansi.Str(loc.fileContent) } // Break apart the colored input into lines and then render each line @@ -235,27 +251,27 @@ object SourceRuntime{ val plainText = colored.plainText val output = mutable.Buffer.empty[String] var current = 0 - while({ - val next = plainText.indexOf('\n', current+1) + while ({ + val next = plainText.indexOf('\n', current + 1) if (next != -1) { output.append(colored.substring(current, next).render) current = next + 1 true - }else{ + } else { output.append(colored.substring(current, colored.length).render) false } - })() + }) () val targetLine = math.max(0, loc.lineNum - verticalOffset) val tmpFile = os.temp(output.mkString("\n"), suffix = "." 
+ loc.fileName) os.proc(command(targetLine).values, tmpFile) - .call(stdin=os.Inherit, stdout=os.Inherit, stderr=os.Inherit) + .call(stdin = os.Inherit, stdout = os.Inherit, stderr = os.Inherit) case Left(msg) => println(msg) } } - def failLoudly[T](res: Either[String, T]): T = res match{ + def failLoudly[T](res: Either[String, T]): T = res match { case Left(s) => throw new Exception(s) case Right(r) => r } diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/tools/desugar.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/tools/desugar.scala index 88150b6eb..65248e66f 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/tools/desugar.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/tools/desugar.scala @@ -3,11 +3,10 @@ package ammonite.compiler.tools import sourcecode.Compat._ import scala.language.experimental.macros - -class Desugared(s: String){ +class Desugared(s: String) { override def toString() = s } -object desugar{ +object desugar { def transformer(c: Context)(expr: c.Expr[Any]): c.Expr[Desugared] = { import c.universe._ c.Expr[Desugared]( @@ -16,4 +15,4 @@ object desugar{ } def apply(expr: Any): Desugared = macro transformer -} \ No newline at end of file +} diff --git a/amm/compiler/src/main/scala-2/ammonite/compiler/tools/source.scala b/amm/compiler/src/main/scala-2/ammonite/compiler/tools/source.scala index 65b8642e6..f3eaf9fb9 100644 --- a/amm/compiler/src/main/scala-2/ammonite/compiler/tools/source.scala +++ b/amm/compiler/src/main/scala-2/ammonite/compiler/tools/source.scala @@ -8,17 +8,15 @@ import sourcecode.Compat._ import scala.annotation.tailrec import scala.language.experimental.macros -object source{ - +object source { def load(f: => Any): Location = macro loadMacro - def loadMacro(c: Context) - (f: c.Expr[Any]): c.Expr[Location] = { + def loadMacro(c: Context)(f: c.Expr[Any]): c.Expr[Location] = { import c.universe._ - val res = breakUp(c)(f) match{ + val res = breakUp(c)(f) match { case None => q"${prefix(c)}.failLoudly(${prefix(c)}.loadObjectInfo($f))" case Some((classThingy, symbolName, lhs, returnClass, argClasses)) => @@ -35,24 +33,22 @@ object source{ """ } - c.Expr[Location](res) } - def apply(f: => Any) - (implicit colors: CodeColors): Unit = macro applyDefaultPPrinterMacro + def apply(f: => Any)(implicit colors: CodeColors): Unit = macro applyDefaultPPrinterMacro - def apply(f: => Any, pprinter: pprint.PPrinter) - (implicit colors: CodeColors): Unit = macro applyMacro + def apply(f: => Any, pprinter: pprint.PPrinter)(implicit colors: CodeColors): Unit = + macro applyMacro - def apply(f: => Any, command: Int => Strings) - (implicit colors: CodeColors): Unit = macro applyCustomizeCommandDefaultPPrinterMacro - def apply(f: => Any, command: Int => Strings, pprinter: pprint.PPrinter) - (implicit colors: CodeColors): Unit = macro applyCustomizeCommandMacro + def apply(f: => Any, command: Int => Strings)(implicit colors: CodeColors): Unit = + macro applyCustomizeCommandDefaultPPrinterMacro + def apply(f: => Any, command: Int => Strings, pprinter: pprint.PPrinter)(implicit + colors: CodeColors + ): Unit = macro applyCustomizeCommandMacro - def applyDefaultPPrinterMacro(c: Context) - (f: c.Expr[Any]) - (colors: c.Expr[CodeColors]): c.Expr[Unit] = { + def applyDefaultPPrinterMacro(c: Context)(f: c.Expr[Any])(colors: c.Expr[CodeColors]) + : c.Expr[Unit] = { import c.universe._ val defaultPPrinter = c.Expr[pprint.PPrinter]( q"${prefix(c)}.defaultPPrinter" @@ -60,9 +56,10 @@ object source{ applyMacro(c)(f, defaultPPrinter)(colors) } - 
def applyMacro(c: Context) - (f: c.Expr[Any], pprinter: c.Expr[pprint.PPrinter]) - (colors: c.Expr[CodeColors]): c.Expr[Unit] = { + def applyMacro(c: Context)( + f: c.Expr[Any], + pprinter: c.Expr[pprint.PPrinter] + )(colors: c.Expr[CodeColors]): c.Expr[Unit] = { import c.universe._ val defaultBrowseExpr = c.Expr[Int => Strings]( q"${prefix(c)}.browseSourceCommand" @@ -71,21 +68,24 @@ object source{ applyCustomizeCommandMacro(c)(f, defaultBrowseExpr, pprinter)(colors) } - def applyCustomizeCommandDefaultPPrinterMacro(c: Context) - (f: c.Expr[Any], command: c.Expr[Int => Strings]) - (colors: c.Expr[CodeColors]): c.Expr[Unit] = { + def applyCustomizeCommandDefaultPPrinterMacro(c: Context)( + f: c.Expr[Any], + command: c.Expr[Int => Strings] + )(colors: c.Expr[CodeColors]): c.Expr[Unit] = { import c.universe._ val defaultPPrinter = c.Expr[pprint.PPrinter]( q"${prefix(c)}.defaultPPrinter" ) applyCustomizeCommandMacro(c)(f, command, defaultPPrinter)(colors) } - def applyCustomizeCommandMacro(c: Context) - (f: c.Expr[Any], command: c.Expr[Int => Strings], pprinter: c.Expr[pprint.PPrinter]) - (colors: c.Expr[CodeColors]): c.Expr[Unit] = { + def applyCustomizeCommandMacro(c: Context)( + f: c.Expr[Any], + command: c.Expr[Int => Strings], + pprinter: c.Expr[pprint.PPrinter] + )(colors: c.Expr[CodeColors]): c.Expr[Unit] = { import c.universe._ c.Expr[Unit]( - breakUp(c)(f) match{ + breakUp(c)(f) match { case Some((classThingy, symbolName, lhs, returnClass, argClasses)) => q""" ${prefix(c)}.browseObjectMember( @@ -108,11 +108,12 @@ object source{ import c.universe._ q"ammonite.compiler.tools.SourceRuntime" } + /** - * Attempts to up an expression, into either a LHS + methodcall + rhs. We - * then look for the source of the method. If it can't be split, we look for - * the source of the class of the entire expression - */ + * Attempts to up an expression, into either a LHS + methodcall + rhs. We + * then look for the source of the method. If it can't be split, we look for + * the source of the class of the entire expression + */ def breakUp(c: Context)(f: c.Expr[Any]) = { import c.universe._ // Break up the expression into it's constituent parts @@ -125,8 +126,7 @@ object source{ // We keep the block wrapper to re-apply to the final expression later, because // sometimes (e.g. 
in the case of `new javassist.ClassPool().find _`) the LHS of // the last expr in the block ends up depending on the earlier statements - @tailrec def rec(wrapper: Tree => Tree, x: Tree) - : Option[(Tree, Symbol, Tree => Tree)] = { + @tailrec def rec(wrapper: Tree => Tree, x: Tree): Option[(Tree, Symbol, Tree => Tree)] = { x match { case Select(qualifier, selector) => if (selector.toString == "") None @@ -142,19 +142,19 @@ object source{ } } - for((lhs, symbol, wrapper) <- rec(identity(_), f.tree)) yield { + for ((lhs, symbol, wrapper) <- rec(identity(_), f.tree)) yield { val method = symbol.asMethod val argClasses = - for(arg <- method.paramss.flatten) - yield q"classOf[${arg.typeSignature.erasure}]" + for (arg <- method.paramss.flatten) + yield q"classOf[${arg.typeSignature.erasure}]" val staticJavaLhsClass = lhs.symbol != null && lhs.symbol.isStatic && lhs.symbol.isJava val ownerCls = symbol.owner.asClass - val paramedOwnerCls = ownerCls.typeParams.length match{ + val paramedOwnerCls = ownerCls.typeParams.length match { case 0 => ownerCls.thisPrefix.widen case n => import compat._ diff --git a/amm/compiler/src/main/scala-2/scala/tools/nsc/CustomZipAndJarFileLookupFactory.scala b/amm/compiler/src/main/scala-2/scala/tools/nsc/CustomZipAndJarFileLookupFactory.scala index 7a94d07b9..c6181b1e7 100644 --- a/amm/compiler/src/main/scala-2/scala/tools/nsc/CustomZipAndJarFileLookupFactory.scala +++ b/amm/compiler/src/main/scala-2/scala/tools/nsc/CustomZipAndJarFileLookupFactory.scala @@ -19,7 +19,7 @@ import scala.tools.nsc.util.{ClassPath, ClassRepresentation} object CustomZipAndJarFileLookupFactory { private final class ZipArchiveClassPath(val zipUrl: URL) - extends ClassPath with NoSourcePaths with AmmClassPath{ + extends ClassPath with NoSourcePaths with AmmClassPath { def zipFile: File = null @@ -96,7 +96,6 @@ object CustomZipAndJarFileLookupFactory { !file.isDirectory && file.hasExtension("class") } - def create(zipFile: AbstractFile, settings: Settings): ClassPath = new ZipArchiveClassPath(zipFile.toURL) -} \ No newline at end of file +} diff --git a/amm/compiler/src/main/scala-3/ammonite/compiler/AsmPositionUpdater.scala b/amm/compiler/src/main/scala-3/ammonite/compiler/AsmPositionUpdater.scala index 0afb0e79a..3bea8999a 100644 --- a/amm/compiler/src/main/scala-3/ammonite/compiler/AsmPositionUpdater.scala +++ b/amm/compiler/src/main/scala-3/ammonite/compiler/AsmPositionUpdater.scala @@ -8,19 +8,19 @@ import java.io.InputStream object AsmPositionUpdater { private class LineNumberTableMethodVisitor( - lineShift: Int, - delegate: asm.MethodVisitor + lineShift: Int, + delegate: asm.MethodVisitor ) extends asm.MethodVisitor(asm.Opcodes.ASM9, delegate) { override def visitLineNumber(line: Int, start: asm.Label): Unit = super.visitLineNumber(line + lineShift, start) } private class LineNumberTableClassVisitor( - mappings: Map[String, (String, Int)], - cw: asm.ClassWriter + mappings: Map[String, (String, Int)], + cw: asm.ClassWriter ) extends asm.ClassVisitor(asm.Opcodes.ASM9, cw) { private var lineShiftOpt = Option.empty[Int] - def mappedStuff = lineShiftOpt.nonEmpty + def mappedStuff = lineShiftOpt.nonEmpty override def visitSource(source: String, debug: String): Unit = mappings.get(source) match { case None => @@ -30,26 +30,26 @@ object AsmPositionUpdater { super.visitSource(newSource, debug) } override def visitMethod( - access: Int, - name: String, - descriptor: String, - signature: String, - exceptions: Array[String] + access: Int, + name: String, + descriptor: String, + signature: String, 
+ exceptions: Array[String] ): asm.MethodVisitor = { val main = super.visitMethod(access, name, descriptor, signature, exceptions) lineShiftOpt match { - case None => main + case None => main case Some(lineShift) => new LineNumberTableMethodVisitor(lineShift, main) } } } def postProcess( - mappings: Map[String, (String, Int)], - clsInputStream: InputStream + mappings: Map[String, (String, Int)], + clsInputStream: InputStream ): Option[Array[Byte]] = { - val reader = new asm.ClassReader(clsInputStream) - val writer = new asm.ClassWriter(reader, 0) + val reader = new asm.ClassReader(clsInputStream) + val writer = new asm.ClassWriter(reader, 0) val checker = new LineNumberTableClassVisitor(mappings, writer) reader.accept(checker, 0) if (checker.mappedStuff) Some(writer.toByteArray) diff --git a/amm/compiler/src/main/scala-3/ammonite/compiler/CompilerExtensions.scala b/amm/compiler/src/main/scala-3/ammonite/compiler/CompilerExtensions.scala index bfd2dec08..bb77fb290 100644 --- a/amm/compiler/src/main/scala-3/ammonite/compiler/CompilerExtensions.scala +++ b/amm/compiler/src/main/scala-3/ammonite/compiler/CompilerExtensions.scala @@ -12,25 +12,25 @@ object CompilerExtensions { private def compilerManager = api._compilerManager.asInstanceOf[CompilerLifecycleManager] /** - * Configures the current compiler, or if the compiler hasn't been initialized - * yet, registers the configuration callback and applies it to the compiler - * when it ends up being initialized later - */ + * Configures the current compiler, or if the compiler hasn't been initialized + * yet, registers the configuration callback and applies it to the compiler + * when it ends up being initialized later + */ def configureCompiler(c: dotty.tools.dotc.Compiler => Unit): Unit = compilerManager.configureCompiler(c) /** - * Pre-configures the next compiler context. Useful for tuning options that are - * used during parsing. - */ + * Pre-configures the next compiler context. Useful for tuning options that are + * used during parsing. + */ def preConfigureCompiler(c: dotty.tools.dotc.core.Contexts.FreshContext => Unit): Unit = compilerManager.preConfigureCompiler(c) /** - * Directory where the byte code resulting from compiling the user code is written. - * This is non-empty only if the `--output-directory` or `--tmp-output-directory` options - * are passed to Ammonite upon launch. - */ + * Directory where the byte code resulting from compiling the user code is written. + * This is non-empty only if the `--output-directory` or `--tmp-output-directory` options + * are passed to Ammonite upon launch. + */ def outputDir: Option[Path] = compilerManager.outputDir } @@ -41,6 +41,7 @@ object CompilerExtensions { def initialContext: dotty.tools.dotc.core.Contexts.Context = compilerManager.compiler.initialCtx + /** * Access the compiler to do crazy things if you really want to! */ diff --git a/amm/compiler/src/main/scala-3/ammonite/compiler/CompilerLifecycleManager.scala b/amm/compiler/src/main/scala-3/ammonite/compiler/CompilerLifecycleManager.scala index f6510874a..85f1a8b8a 100644 --- a/amm/compiler/src/main/scala-3/ammonite/compiler/CompilerLifecycleManager.scala +++ b/amm/compiler/src/main/scala-3/ammonite/compiler/CompilerLifecycleManager.scala @@ -12,26 +12,25 @@ import dotc.core.Contexts.FreshContext import dotty.tools.io.AbstractFile import scala.collection.mutable - /** - * Wraps up the `Compiler` and `Pressy`, ensuring that they get properly - * initialized before use. 
Mostly deals with ensuring the object lifecycles - * are properly dealt with; `Compiler` and `Pressy` are the ones which deal - * with the compiler's nasty APIs - * - * Exposes a simple API where you can just call methods like `compilerClass` - * `configureCompiler` any-how and not worry about ensuring the necessary - * compiler objects are initialized, or worry about initializing them more - * than necessary - */ + * Wraps up the `Compiler` and `Pressy`, ensuring that they get properly + * initialized before use. Mostly deals with ensuring the object lifecycles + * are properly dealt with; `Compiler` and `Pressy` are the ones which deal + * with the compiler's nasty APIs + * + * Exposes a simple API where you can just call methods like `compilerClass` + * `configureCompiler` any-how and not worry about ensuring the necessary + * compiler objects are initialized, or worry about initializing them more + * than necessary + */ class CompilerLifecycleManager( - rtCacheDir: Option[Path], - headFrame: => ammonite.util.Frame, - dependencyCompleteOpt: => Option[String => (Int, Seq[String])], - classPathWhitelist: Set[Seq[String]], - initialClassLoader: ClassLoader, - val outputDir: Option[Path], - initialSettings: Seq[String] + rtCacheDir: Option[Path], + headFrame: => ammonite.util.Frame, + dependencyCompleteOpt: => Option[String => (Int, Seq[String])], + classPathWhitelist: Set[Seq[String]], + initialClassLoader: ClassLoader, + val outputDir: Option[Path], + initialSettings: Seq[String] ) extends ammonite.compiler.iface.CompilerLifecycleManager { def scalaVersion = dotc.config.Properties.versionNumberString @@ -39,8 +38,7 @@ class CompilerLifecycleManager( def forceInit(): Unit = init(force = true) def init(): Unit = init(force = false) - - private[this] object Internal{ + private[this] object Internal { outputDir.map(os.Path(_, os.pwd)).foreach(os.makeDir.all(_)) val dynamicClasspath = AbstractFile.getDirectory(outputDir.getOrElse(os.temp.dir().toNIO)) var compiler: ammonite.compiler.Compiler = null @@ -51,10 +49,8 @@ class CompilerLifecycleManager( var (lastFrame, lastFrameVersion) = (headFrame, headFrame.version) } - import Internal._ - // Public to expose it in the REPL so people can poke at it at runtime // Not for use within Ammonite! Use one of the other methods to ensure // that `Internal.compiler` is properly initialized before use. @@ -63,12 +59,11 @@ class CompilerLifecycleManager( // def pressy: Pressy = Internal.pressy - def preprocess(fileName: String) = synchronized{ + def preprocess(fileName: String) = synchronized { init() compiler.preprocessor(fileName) } - // We lazily force the compiler to be re-initialized by setting the // compilerStale flag. 
Otherwise, if we re-initialized the compiler eagerly, // we end up sometimes re-initializing it multiple times unnecessarily before @@ -76,12 +71,14 @@ class CompilerLifecycleManager( // re-initializations by about 2/3, each of which costs about 30ms and // probably creates a pile of garbage - def init(force: Boolean = false) = synchronized{ - if (compiler == null || - (headFrame ne lastFrame) || - headFrame.version != lastFrameVersion || - Internal.preConfiguredSettingsChanged || - force) { + def init(force: Boolean = false) = synchronized { + if ( + compiler == null || + (headFrame ne lastFrame) || + headFrame.version != lastFrameVersion || + Internal.preConfiguredSettingsChanged || + force + ) { lastFrame = headFrame lastFrameVersion = headFrame.version @@ -107,19 +104,19 @@ class CompilerLifecycleManager( } def complete( - offset: Int, - previousImports: String, - snippet: String - ): (Int, Seq[String], Seq[String]) = synchronized{ + offset: Int, + previousImports: String, + snippet: String + ): (Int, Seq[String], Seq[String]) = synchronized { init() Internal.compiler.complete(offset, previousImports, snippet) } def compileClass( - processed: ammonite.compiler.iface.Preprocessor.Output, - printer: Printer, - fileName: String - ): Option[ammonite.compiler.iface.Compiler.Output] = synchronized{ + processed: ammonite.compiler.iface.Preprocessor.Output, + printer: Printer, + fileName: String + ): Option[ammonite.compiler.iface.Compiler.Output] = synchronized { // Enforce the invariant that every piece of code Ammonite ever compiles, // gets run within the `ammonite` package. It's further namespaced into // things like `ammonite.$file` or `ammonite.$sess`, but it has to be @@ -138,9 +135,9 @@ class CompilerLifecycleManager( compiled } - def configureCompiler(callback: DottyCompiler => Unit) = synchronized{ + def configureCompiler(callback: DottyCompiler => Unit) = synchronized { onCompilerInit.append(callback) - if (compiler != null){ + if (compiler != null) { callback(compiler.compiler) } } diff --git a/amm/compiler/src/main/scala-3/ammonite/compiler/internal/CompilerHelper.scala b/amm/compiler/src/main/scala-3/ammonite/compiler/internal/CompilerHelper.scala index a9d051cd6..89aa3b736 100644 --- a/amm/compiler/src/main/scala-3/ammonite/compiler/internal/CompilerHelper.scala +++ b/amm/compiler/src/main/scala-3/ammonite/compiler/internal/CompilerHelper.scala @@ -10,6 +10,8 @@ object CompilerHelper { List(new Parser), List(new TyperPhase) ) - def messageAndPos(messageRenderer: MessageRendering, diagnostic: Diagnostic)(implicit ctx: Context) = + def messageAndPos(messageRenderer: MessageRendering, diagnostic: Diagnostic)(implicit + ctx: Context + ) = messageRenderer.messageAndPos(diagnostic) } diff --git a/amm/compiler/src/main/scala-3/ammonite/compiler/tools/source.scala b/amm/compiler/src/main/scala-3/ammonite/compiler/tools/source.scala index b8e4bf9ca..0b29861e6 100644 --- a/amm/compiler/src/main/scala-3/ammonite/compiler/tools/source.scala +++ b/amm/compiler/src/main/scala-3/ammonite/compiler/tools/source.scala @@ -2,7 +2,7 @@ package ammonite.compiler.tools import ammonite.util.Util.Location -object source{ +object source { def load(f: => Any): Location = ??? 
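The CompilerLifecycleManager hunks above describe a lazy re-initialization strategy: the compiler is rebuilt, inside a synchronized block, only when the inputs it was last built from (the head frame, its version, or the pre-configured settings) are observed to have changed, or when a rebuild is forced. A minimal standalone sketch of that pattern follows; the names and types here are illustrative only and are not Ammonite's API.

object LazyReinitSketch {
  // Illustrative stand-ins for the inputs the real manager watches
  // (head frame identity, frame version, pre-configured settings).
  final case class Inputs(frame: AnyRef, version: Int, settings: Seq[String])

  final class Manager(build: Inputs => String) {
    private var last: Option[(Inputs, String)] = None

    // Rebuild only when forced, or when the observed inputs differ from the
    // ones the cached value was built from; otherwise reuse the cached value.
    def get(current: Inputs, force: Boolean = false): String = synchronized {
      last match {
        case Some((prev, cached)) if !force && prev == current => cached
        case _ =>
          val built = build(current) // the expensive step, e.g. constructing a compiler
          last = Some((current, built))
          built
      }
    }
  }

  def main(args: Array[String]): Unit = {
    val manager = new Manager(in => s"compiler for frame version ${in.version}")
    val frame = new Object
    val a = manager.get(Inputs(frame, 1, Nil))
    val b = manager.get(Inputs(frame, 1, Nil)) // same inputs: reused, no rebuild
    val c = manager.get(Inputs(frame, 2, Nil)) // version bumped: rebuilt
    println((a eq b, c))
  }
}

Checking at the point of use, rather than rebuilding eagerly whenever an input changes, is what collapses several quick successive changes into a single rebuild, which is the roughly two-thirds reduction in re-initializations that the comment in the hunk refers to.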
diff --git a/amm/compiler/src/main/scala-3/dotty/ammonite/compiler/DirectoryClassPath.scala b/amm/compiler/src/main/scala-3/dotty/ammonite/compiler/DirectoryClassPath.scala index f7029fa6b..af81b7189 100644 --- a/amm/compiler/src/main/scala-3/dotty/ammonite/compiler/DirectoryClassPath.scala +++ b/amm/compiler/src/main/scala-3/dotty/ammonite/compiler/DirectoryClassPath.scala @@ -18,8 +18,7 @@ case class DirectoryClassPath(dir: JFile) val wrappedClassFile = new dotty.tools.io.File(classFile.toPath) val abstractClassFile = new PlainFile(wrappedClassFile) Some(abstractClassFile) - } - else None + } else None } protected def createFileEntry(file: AbstractFile): classpath.ClassFileEntryImpl = diff --git a/amm/compiler/src/main/scala-3/dotty/ammonite/compiler/WhiteListClassPath.scala b/amm/compiler/src/main/scala-3/dotty/ammonite/compiler/WhiteListClassPath.scala index bc773bfa3..ae9d586cc 100644 --- a/amm/compiler/src/main/scala-3/dotty/ammonite/compiler/WhiteListClassPath.scala +++ b/amm/compiler/src/main/scala-3/dotty/ammonite/compiler/WhiteListClassPath.scala @@ -6,20 +6,19 @@ import dotty.tools.dotc.classpath.{ClassPathEntries, PackageName} import dotty.tools.io.ClassPath class WhiteListClasspath(aggregates: Seq[ClassPath], whitelist: Set[Seq[String]]) - extends dotty.tools.dotc.classpath.AggregateClassPath(aggregates) { + extends dotty.tools.dotc.classpath.AggregateClassPath(aggregates) { override def findClassFile(name: String) = { val tokens = name.split('.') if (Util.lookupWhiteList(whitelist, tokens.init ++ Seq(tokens.last + ".class"))) { super.findClassFile(name) - } - else None + } else None } override def list(inPackage: PackageName) = { val superList = super.list(inPackage) ClassPathEntries( - superList.packages.filter{ p => Util.lookupWhiteList(whitelist, p.name.split('.')) }, - superList.classesAndSources.filter{ t => + superList.packages.filter { p => Util.lookupWhiteList(whitelist, p.name.split('.')) }, + superList.classesAndSources.filter { t => Util.lookupWhiteList(whitelist, inPackage.dottedString.split('.') ++ Seq(t.name + ".class")) } ) @@ -27,4 +26,4 @@ class WhiteListClasspath(aggregates: Seq[ClassPath], whitelist: Set[Seq[String]] override def toString: String = s"WhiteListClasspath($aggregates, ${whitelist.size} white-listed elements)" -} \ No newline at end of file +} diff --git a/amm/compiler/src/main/scala-not-2.12.10-2.13.1+/scala/tools/nsc/AmmClassPath.scala b/amm/compiler/src/main/scala-not-2.12.10-2.13.1+/scala/tools/nsc/AmmClassPath.scala index d79fbe978..af3a3befa 100644 --- a/amm/compiler/src/main/scala-not-2.12.10-2.13.1+/scala/tools/nsc/AmmClassPath.scala +++ b/amm/compiler/src/main/scala-not-2.12.10-2.13.1+/scala/tools/nsc/AmmClassPath.scala @@ -10,8 +10,7 @@ import scala.tools.nsc.classpath.FileUtils.AbstractFileOps import scala.tools.nsc.classpath.{ClassPathEntries, _} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} - -trait AmmClassPath extends ClassPath{ +trait AmmClassPath extends ClassPath { def zipUrl: URL def ammPackages(inPackage: String): Seq[PackageEntry] def packages(inPackage: String): Seq[PackageEntry] = { @@ -23,7 +22,6 @@ trait AmmClassPath extends ClassPath{ ammList(inPackage) } - def ammClasses(inPackage: String): Seq[ClassFileEntry] def classes(inPackage: String): Seq[ClassFileEntry] = { ammClasses(inPackage) diff --git a/amm/compiler/src/main/scala-not-2.12.10-2.13.1+/scala/tools/nsc/WhiteListClasspath.scala b/amm/compiler/src/main/scala-not-2.12.10-2.13.1+/scala/tools/nsc/WhiteListClasspath.scala index cf396c1f9..e3485359d 
100644 --- a/amm/compiler/src/main/scala-not-2.12.10-2.13.1+/scala/tools/nsc/WhiteListClasspath.scala +++ b/amm/compiler/src/main/scala-not-2.12.10-2.13.1+/scala/tools/nsc/WhiteListClasspath.scala @@ -6,21 +6,20 @@ import scala.tools.nsc.classpath.ClassPathEntries import scala.tools.nsc.util.ClassPath class WhiteListClasspath(aggregates: Seq[ClassPath], whitelist: Set[Seq[String]]) - extends scala.tools.nsc.classpath.AggregateClassPath(aggregates) { + extends scala.tools.nsc.classpath.AggregateClassPath(aggregates) { override def findClassFile(name: String) = { val tokens = name.split('.') if (Util.lookupWhiteList(whitelist, tokens.init ++ Seq(tokens.last + ".class"))) { super.findClassFile(name) - } - else None + } else None } override def list(inPackage: String) = { val superList = super.list(inPackage) ClassPathEntries( - superList.packages.filter{ p => Util.lookupWhiteList(whitelist, p.name.split('.')) }, - superList.classesAndSources.filter{ t => + superList.packages.filter { p => Util.lookupWhiteList(whitelist, p.name.split('.')) }, + superList.classesAndSources.filter { t => Util.lookupWhiteList(whitelist, inPackage.split('.') ++ Seq(t.name + ".class")) } ) } -} \ No newline at end of file +} diff --git a/amm/compiler/src/main/scala-not-2.12.13+-2.13.1+/ammonite/compiler/MakeReporter.scala b/amm/compiler/src/main/scala-not-2.12.13+-2.13.1+/ammonite/compiler/MakeReporter.scala index f9775817b..a0aa55f83 100644 --- a/amm/compiler/src/main/scala-not-2.12.13+-2.13.1+/ammonite/compiler/MakeReporter.scala +++ b/amm/compiler/src/main/scala-not-2.12.13+-2.13.1+/ammonite/compiler/MakeReporter.scala @@ -12,19 +12,21 @@ import scala.tools.nsc.plugins.Plugin import scala.tools.nsc.reporters.AbstractReporter import scala.tools.nsc.typechecker.Analyzer -object MakeReporter{ +object MakeReporter { type Reporter = AbstractReporter - def makeReporter(errorLogger: (Position, String) => Unit, - warningLogger: (Position, String) => Unit, - infoLogger: (Position, String) => Unit, - outerSettings: Settings): Reporter = + def makeReporter( + errorLogger: (Position, String) => Unit, + warningLogger: (Position, String) => Unit, + infoLogger: (Position, String) => Unit, + outerSettings: Settings + ): Reporter = new AbstractReporter { def displayPrompt(): Unit = ??? 
def display(pos: Position, msg: String, severity: Severity) = { - severity match{ + severity match { case ERROR => Classpath.traceClasspathProblem(s"ERROR: $msg") errorLogger(pos, msg) @@ -37,4 +39,4 @@ object MakeReporter{ val settings = outerSettings } -} \ No newline at end of file +} diff --git a/amm/compiler/src/main/scala/ammonite/compiler/CodeClassWrapper.scala b/amm/compiler/src/main/scala/ammonite/compiler/CodeClassWrapper.scala index d20541e40..63d62ca5c 100644 --- a/amm/compiler/src/main/scala/ammonite/compiler/CodeClassWrapper.scala +++ b/amm/compiler/src/main/scala/ammonite/compiler/CodeClassWrapper.scala @@ -6,7 +6,7 @@ import ammonite.util.Util.{CodeSource, newLine, normalizeNewlines} import scala.language.postfixOps -object CodeClassWrapper extends CodeWrapper{ +object CodeClassWrapper extends CodeWrapper { /* * The goal of this code wrapper is that the user code: * - should be in a class rather than a singleton, @@ -29,12 +29,12 @@ object CodeClassWrapper extends CodeWrapper{ private val tq = "\"\"\"" override val wrapperPath: Seq[Name] = Seq(Name("instance")) def apply( - code: String, - source: CodeSource, - imports: Imports, - printCode: String, - indexedWrapperName: Name, - extraCode: String + code: String, + source: CodeSource, + imports: Imports, + printCode: String, + indexedWrapperName: Name, + extraCode: String ) = { import source.pkgName val isObjDef = Parsers.isObjDef(code) @@ -51,8 +51,7 @@ object ${indexedWrapperName.backticked}{ def $$main() = instance.$$main() object Helper extends _root_.java.io.Serializable { -""" - ) +""") val bottom = normalizeNewlines(s"""\ndef $$main() = { $printCode } override def toString = "${indexedWrapperName.encoded}"; @@ -113,7 +112,7 @@ object ${indexedWrapperName.backticked}{ "_root_.ammonite.repl.ReplBridge.value.usedEarlierDefinitions.iterator.toSet" } - val top = normalizeNewlines(s""" + val top = normalizeNewlines(s""" package ${pkgName.head.encoded} package ${Util.encodeScalaSourcePath(pkgName.tail)} @@ -132,8 +131,7 @@ override def toString = $q${indexedWrapperName.encoded}$q $requiredVals $reworkedImports -final class Helper extends _root_.java.io.Serializable{\n""" - ) +final class Helper extends _root_.java.io.Serializable{\n""") val bottom = normalizeNewlines(s"""\ndef $$main() = { $printCode } diff --git a/amm/compiler/src/main/scala/ammonite/compiler/DefaultCodeWrapper.scala b/amm/compiler/src/main/scala/ammonite/compiler/DefaultCodeWrapper.scala index 06d5c62f9..496d45bb6 100644 --- a/amm/compiler/src/main/scala/ammonite/compiler/DefaultCodeWrapper.scala +++ b/amm/compiler/src/main/scala/ammonite/compiler/DefaultCodeWrapper.scala @@ -4,16 +4,15 @@ import ammonite.compiler.iface.CodeWrapper import ammonite.util._ import ammonite.util.Util.{CodeSource, normalizeNewlines} - -object DefaultCodeWrapper extends CodeWrapper{ +object DefaultCodeWrapper extends CodeWrapper { private val userCodeNestingLevel = 1 def apply( - code: String, - source: CodeSource, - imports: Imports, - printCode: String, - indexedWrapperName: Name, - extraCode: String + code: String, + source: CodeSource, + imports: Imports, + printCode: String, + indexedWrapperName: Name, + extraCode: String ) = { import source.pkgName val top = normalizeNewlines(s""" @@ -21,9 +20,8 @@ package ${pkgName.head.encoded} package ${Util.encodeScalaSourcePath(pkgName.tail)} $imports -object ${indexedWrapperName.backticked}{\n""" - ) - val bottom = normalizeNewlines(s"""\ndef $$main() = { $printCode } +object ${indexedWrapperName.backticked}{\n""") + val bottom = 
normalizeNewlines(s"""\ndef $$main() = { $printCode } override def toString = "${indexedWrapperName.encoded}" $extraCode } diff --git a/amm/interp/api/src/main/scala/ammonite/Stubs.scala b/amm/interp/api/src/main/scala/ammonite/Stubs.scala index c3202a2d0..a42aa6c20 100644 --- a/amm/interp/api/src/main/scala/ammonite/Stubs.scala +++ b/amm/interp/api/src/main/scala/ammonite/Stubs.scala @@ -4,51 +4,51 @@ package ammonite // packages in the Scala compiler. /** - * Package that gets filled with any script files that the user imports - */ -package $file{ + * Package that gets filled with any script files that the user imports + */ +package $file { object $ } /** - * Package that gets filled with any script files that the user imports - */ -package $exec{ + * Package that gets filled with any script files that the user imports + */ +package $exec { object $ } /** - * Package that gets filled with ivy artifacts the user loads - */ -package $ivy{ + * Package that gets filled with ivy artifacts the user loads + */ +package $ivy { object $ } /** - * Package that gets filled with any web scripts people load from http URLs - */ -package $url{ + * Package that gets filled with any web scripts people load from http URLs + */ +package $url { object $ } /** - * Package to import from when you don't want to import anything - */ -package $stub{ + * Package to import from when you don't want to import anything + */ +package $stub { object $ } /** - * Package to import from when you don't want to import anything - */ -package $cp{ + * Package to import from when you don't want to import anything + */ +package $cp { object $ } /** - * Package to import from when you don't want to import anything - */ -package $plugin{ + * Package to import from when you don't want to import anything + */ +package $plugin { object $ } diff --git a/amm/interp/api/src/main/scala/ammonite/interp/api/AmmoniteExit.scala b/amm/interp/api/src/main/scala/ammonite/interp/api/AmmoniteExit.scala index cbbec90dc..de322f162 100644 --- a/amm/interp/api/src/main/scala/ammonite/interp/api/AmmoniteExit.scala +++ b/amm/interp/api/src/main/scala/ammonite/interp/api/AmmoniteExit.scala @@ -3,6 +3,6 @@ package ammonite.interp.api import scala.util.control.ControlThrowable /** - * Thrown to exit the REPL cleanly - */ + * Thrown to exit the REPL cleanly + */ case class AmmoniteExit(value: Any) extends ControlThrowable diff --git a/amm/interp/api/src/main/scala/ammonite/interp/api/InterpAPI.scala b/amm/interp/api/src/main/scala/ammonite/interp/api/InterpAPI.scala index e785ba6b2..0f04b2c3d 100644 --- a/amm/interp/api/src/main/scala/ammonite/interp/api/InterpAPI.scala +++ b/amm/interp/api/src/main/scala/ammonite/interp/api/InterpAPI.scala @@ -1,20 +1,19 @@ package ammonite.interp.api - import ammonite.util.{Colors, Ref} import coursierapi.{Dependency, Fetch, Repository} import scala.collection.mutable - object InterpBridge extends APIHolder[InterpAPI] trait InterpAPI { + /** - * When running a script in `--watch` mode, re-run the main script if this - * file changes. By default, this happens for all script files, but you can - * call this to watch arbitrary files your script may depend on - */ + * When running a script in `--watch` mode, re-run the main script if this + * file changes. 
By default, this happens for all script files, but you can + * call this to watch arbitrary files your script may depend on + */ def watch(p: os.Path): Unit /** @@ -24,9 +23,9 @@ trait InterpAPI { def watchValue[T](v: => T): T /** - * The colors that will be used to render the Ammonite REPL in the terminal, - * or for rendering miscellaneous info messages when running scripts. - */ + * The colors that will be used to render the Ammonite REPL in the terminal, + * or for rendering miscellaneous info messages when running scripts. + */ val colors: Ref[Colors] /** @@ -40,23 +39,25 @@ trait InterpAPI { def repositories: Ref[List[Repository]] /** - * Functions that will be chained and called on the coursier - * Fetch object right before they are run - */ + * Functions that will be chained and called on the coursier + * Fetch object right before they are run + */ val resolutionHooks: mutable.Buffer[Fetch => Fetch] /** - * Exit the Ammonite REPL. You can also use Ctrl-D to exit - */ + * Exit the Ammonite REPL. You can also use Ctrl-D to exit + */ def exit = throw AmmoniteExit(()) + /** - * Exit the Ammonite REPL. You can also use Ctrl-D to exit - */ + * Exit the Ammonite REPL. You can also use Ctrl-D to exit + */ def exit(value: Any) = throw AmmoniteExit(value) + /** - * Functions that will be chained and called on the - * exitValue before the repl exits - */ + * Functions that will be chained and called on the + * exitValue before the repl exits + */ val beforeExitHooks: mutable.Buffer[Any => Any] implicit def scalaVersion: ScalaVersion @@ -64,28 +65,30 @@ trait InterpAPI { def _compilerManager: ammonite.compiler.iface.CompilerLifecycleManager } - trait LoadJar { /** * Load a `.jar` file or directory into your JVM classpath */ def cp(jar: os.Path): Unit + /** - * Load a `.jar` from a URL into your JVM classpath - */ + * Load a `.jar` from a URL into your JVM classpath + */ def cp(jar: java.net.URL): Unit + /** * Load one or more `.jar` files or directories into your JVM classpath */ def cp(jars: Seq[os.Path]): Unit + /** * Load a library from its maven/ivy coordinates */ def ivy(coordinates: Dependency*): Unit } -trait InterpLoad extends LoadJar{ +trait InterpLoad extends LoadJar { def module(path: os.Path): Unit diff --git a/amm/interp/api/src/main/scala/ammonite/interp/api/IvyConstructor.scala b/amm/interp/api/src/main/scala/ammonite/interp/api/IvyConstructor.scala index f29f3c60e..2e69389c2 100644 --- a/amm/interp/api/src/main/scala/ammonite/interp/api/IvyConstructor.scala +++ b/amm/interp/api/src/main/scala/ammonite/interp/api/IvyConstructor.scala @@ -12,15 +12,15 @@ object IvyConstructor extends IvyConstructor { } } -trait IvyConstructor{ - implicit class GroupIdExt(groupId: String){ +trait IvyConstructor { + implicit class GroupIdExt(groupId: String) { def %(artifactId: String) = Module.of(groupId, artifactId) def %%(artifactId: String)(implicit sv: ScalaVersion) = Module.of( groupId, artifactId + "_" + IvyConstructor.scalaBinaryVersion(sv.value) ) } - implicit class ArtifactIdExt(t: Module){ + implicit class ArtifactIdExt(t: Module) { def %(version: String) = Dependency.of(t, version) } } diff --git a/amm/interp/src/main/scala/ammonite/interp/DependencyLoader.scala b/amm/interp/src/main/scala/ammonite/interp/DependencyLoader.scala index c20b87773..77616918e 100644 --- a/amm/interp/src/main/scala/ammonite/interp/DependencyLoader.scala +++ b/amm/interp/src/main/scala/ammonite/interp/DependencyLoader.scala @@ -7,10 +7,10 @@ import ammonite.util.Printer import coursierapi.{Dependency, Fetch, 
Repository} final class DependencyLoader( - printer: Printer, - storage: Storage, - alreadyLoadedDependencies: Seq[Dependency], - verboseOutput: Boolean + printer: Printer, + storage: Storage, + alreadyLoadedDependencies: Seq[Dependency], + verboseOutput: Boolean ) { private val alwaysExclude = alreadyLoadedDependencies @@ -18,9 +18,9 @@ final class DependencyLoader( .toSet def load( - coordinates: Seq[Dependency], - repositories: => Seq[Repository], - resolutionHooks: Seq[Fetch => Fetch] + coordinates: Seq[Dependency], + repositories: => Seq[Repository], + resolutionHooks: Seq[Fetch => Fetch] ): Either[String, Seq[File]] = { val repositories0 = repositories val cacheKey = ( @@ -29,7 +29,7 @@ final class DependencyLoader( // FIXME Add resolutionHooks somehow? ) - storage.ivyCache().get(cacheKey) match{ + storage.ivyCache().get(cacheKey) match { case Some(res) => Right(res.map(new java.io.File(_))) case None => ammonite.runtime.tools.IvyThing.resolveArtifact( @@ -44,11 +44,12 @@ final class DependencyLoader( verbose = verboseOutput, output = printer.errStream, hooks = resolutionHooks - )match{ + ) match { case Right((canBeCached, loaded)) => if (canBeCached) storage.ivyCache() = storage.ivyCache().updated( - cacheKey, loaded.map(_.getAbsolutePath) + cacheKey, + loaded.map(_.getAbsolutePath) ) Right(loaded) case Left(l) => diff --git a/amm/interp/src/main/scala/ammonite/interp/Interpreter.scala b/amm/interp/src/main/scala/ammonite/interp/Interpreter.scala index e186949c2..5430d3984 100644 --- a/amm/interp/src/main/scala/ammonite/interp/Interpreter.scala +++ b/amm/interp/src/main/scala/ammonite/interp/Interpreter.scala @@ -26,19 +26,19 @@ import coursierapi.{Dependency, Fetch, Repository} * to interpret Scala code. Doesn't attempt to provide any * real encapsulation for now. 
*/ -class Interpreter(val compilerBuilder: CompilerBuilder, - // by-name, so that fastparse isn't loaded when we don't need it - parser: () => Parser, - getFrame: () => Frame, - val createFrame: () => Frame, - replCodeWrapper: CodeWrapper, - val scriptCodeWrapper: CodeWrapper, - parameters: Interpreter.Parameters = Interpreter.Parameters()) - extends ImportHook.InterpreterInterface{ interp => +class Interpreter( + val compilerBuilder: CompilerBuilder, + // by-name, so that fastparse isn't loaded when we don't need it + parser: () => Parser, + getFrame: () => Frame, + val createFrame: () => Frame, + replCodeWrapper: CodeWrapper, + val scriptCodeWrapper: CodeWrapper, + parameters: Interpreter.Parameters = Interpreter.Parameters() +) extends ImportHook.InterpreterInterface { interp => import parameters._ - def headFrame = getFrame() val repositories = Ref(ammonite.runtime.tools.IvyThing.defaultRepositories) val resolutionHooks = mutable.Buffer.empty[Fetch => Fetch] @@ -92,7 +92,6 @@ class Interpreter(val compilerBuilder: CompilerBuilder, verboseOutput ) - // Use a var and callbacks instead of a fold, because when running // `processModule0` user code may end up calling `processModule` which depends // on `predefImports`, and we should be able to provide the "current" imports @@ -102,18 +101,18 @@ class Interpreter(val compilerBuilder: CompilerBuilder, // Needs to be run after the Interpreter has been instantiated, as some of the // ReplAPIs available in the predef need access to the Interpreter object def initializePredef( - basePredefs: Seq[PredefInfo], - customPredefs: Seq[PredefInfo], - // Allows you to set up additional "bridges" between the REPL - // world and the outside world, by passing in the full name - // of the `APIHolder` object that will hold the bridge and - // the object that will be placed there. Needs to be passed - // in as a callback rather than run manually later as these - // bridges need to be in place *before* the predef starts - // running, so you can use them predef to e.g. configure - // the REPL before it starts - extraBridges: Seq[(String, String, AnyRef)], - baseImports: Imports = Interpreter.predefImports + basePredefs: Seq[PredefInfo], + customPredefs: Seq[PredefInfo], + // Allows you to set up additional "bridges" between the REPL + // world and the outside world, by passing in the full name + // of the `APIHolder` object that will hold the bridge and + // the object that will be placed there. Needs to be passed + // in as a callback rather than run manually later as these + // bridges need to be in place *before* the predef starts + // running, so you can use them predef to e.g. 
configure + // the REPL before it starts + extraBridges: Seq[(String, String, AnyRef)], + baseImports: Imports = Interpreter.predefImports ): Option[(Res.Failing, Seq[(Watchable, Long)])] = { headFrame.classloader.specialLocalClasses ++= Seq( @@ -143,7 +142,7 @@ class Interpreter(val compilerBuilder: CompilerBuilder, processModule(_, _, autoImport = false, "", _), imports => predefImports = predefImports ++ imports, watch - ) match{ + ) match { case Res.Success(_) => None case Res.Skip => None case r @ Res.Exception(t, s) => Some((r, watchedValues.toSeq)) @@ -157,15 +156,16 @@ class Interpreter(val compilerBuilder: CompilerBuilder, def watch(p: os.Path) = watchedValues.append( (Watchable.Path(p), Watchable.pathSignature(p)) ) - def watchValue[T](v: => T) = watchedValues.append((new Watchable { def poll() = v.hashCode.toLong }, v.hashCode().toLong)) + def watchValue[T](v: => T) = + watchedValues.append((new Watchable { def poll() = v.hashCode.toLong }, v.hashCode().toLong)) def resolveSingleImportHook( - source: CodeSource, - tree: ImportTree, - wrapperPath: Seq[Name] - ) = synchronized{ - val hookOpt = importHooks.find{case (k, v) => tree.strippedPrefix.startsWith(k)} - for{ + source: CodeSource, + tree: ImportTree, + wrapperPath: Seq[Name] + ) = synchronized { + val hookOpt = importHooks.find { case (k, v) => tree.strippedPrefix.startsWith(k) } + for { (hookPrefix, hook) <- Res(hookOpt, s"Import Hook ${tree.prefix} could not be resolved") hooked <- Res( hook.handle( @@ -175,12 +175,15 @@ class Interpreter(val compilerBuilder: CompilerBuilder, wrapperPath ) ) - hookResults <- Res.map(hooked){ + hookResults <- Res.map(hooked) { case res: ImportHook.Result.Source => - for{ + for { processed <- processModule( - res.code, res.codeSource, - autoImport = false, extraCode = "", hardcoded = false + res.code, + res.codeSource, + autoImport = false, + extraCode = "", + hardcoded = false ) } yield { // For $file imports, we do not propagate any imports from the imported scripted @@ -193,7 +196,6 @@ class Interpreter(val compilerBuilder: CompilerBuilder, else processed.blockInfo.last.finalImports ++ res.hookImports } case res: ImportHook.Result.ClassPath => - if (res.plugin) headFrame.addPluginClasspath(res.files.map(_.toNIO.toUri.toURL)) else headFrame.addClasspath(res.files.map(_.toNIO.toUri.toURL)) @@ -207,26 +209,28 @@ class Interpreter(val compilerBuilder: CompilerBuilder, } yield hookResults } - - - def resolveImportHooks(importTrees: Seq[ImportTree], - hookedStmts: Seq[String], - source: CodeSource, - wrapperPath: Seq[Name]): Res[ImportHookInfo] = synchronized{ + def resolveImportHooks( + importTrees: Seq[ImportTree], + hookedStmts: Seq[String], + source: CodeSource, + wrapperPath: Seq[Name] + ): Res[ImportHookInfo] = synchronized { for (hookImports <- Res.map(importTrees)(resolveSingleImportHook(source, _, wrapperPath))) - yield ImportHookInfo( - Imports(hookImports.flatten.flatMap(_.value)), - hookedStmts, - importTrees - ) + yield ImportHookInfo( + Imports(hookImports.flatten.flatMap(_.value)), + hookedStmts, + importTrees + ) } - def processLine(code: String, - stmts: Seq[String], - currentLine: Int, - silent: Boolean = false, - incrementLine: () => Unit): Res[Evaluated] = synchronized{ + def processLine( + code: String, + stmts: Seq[String], + currentLine: Int, + silent: Boolean = false, + incrementLine: () => Unit + ): Res[Evaluated] = synchronized { val wrapperName = Name(wrapperNamePrefix + currentLine) @@ -234,11 +238,11 @@ class Interpreter(val compilerBuilder: CompilerBuilder, 
wrapperName, Seq(), Seq(Name("ammonite"), Name("$sess")), - Some(wd/"(console)") + Some(wd / "(console)") ) val (hookStmts, importTrees) = parser().parseImportHooks(codeSource, stmts) - for{ + for { _ <- Catching { case ex => Res.Exception(ex, "") } ImportHookInfo(hookImports, hookStmts, _) <- resolveImportHooks( importTrees, @@ -262,21 +266,23 @@ class Interpreter(val compilerBuilder: CompilerBuilder, ) (out, tag) <- evaluateLine( processed, - wrapperName.encoded + ".sc", wrapperName, + wrapperName.encoded + ".sc", + wrapperName, silent, incrementLine ) } yield out.copy(imports = out.imports ++ hookImports) } - - def evaluateLine(processed: Preprocessor.Output, - fileName: String, - indexedWrapperName: Name, - silent: Boolean = false, - incrementLine: () => Unit): Res[(Evaluated, Tag)] = synchronized{ - for{ - _ <- Catching{ case e: ThreadDeath => Evaluator.interrupted(e) } + def evaluateLine( + processed: Preprocessor.Output, + fileName: String, + indexedWrapperName: Name, + silent: Boolean = false, + incrementLine: () => Unit + ): Res[(Evaluated, Tag)] = synchronized { + for { + _ <- Catching { case e: ThreadDeath => Evaluator.interrupted(e) } output <- Res( compilerManager.compileClass( processed, @@ -299,11 +305,11 @@ class Interpreter(val compilerBuilder: CompilerBuilder, } yield (res, Tag("", "", classPathWhitelist.hashCode().toString)) } - - def processSingleBlock(processed: Preprocessor.Output, - codeSource0: CodeSource, - indexedWrapperName: Name) = synchronized{ - + def processSingleBlock( + processed: Preprocessor.Output, + codeSource0: CodeSource, + indexedWrapperName: Name + ) = synchronized { val codeSource = codeSource0.copy(wrapperName = indexedWrapperName) val fullyQualifiedName = codeSource.jvmPathPrefix @@ -315,10 +321,12 @@ class Interpreter(val compilerBuilder: CompilerBuilder, ) for { - _ <- Catching{case e: Throwable => e.printStackTrace(); throw e} + _ <- Catching { case e: Throwable => e.printStackTrace(); throw e } output <- Res( compilerManager.compileClass( - processed, printer, codeSource.printablePath + processed, + printer, + codeSource.printablePath ), "Compilation Failed" ) @@ -344,16 +352,16 @@ class Interpreter(val compilerBuilder: CompilerBuilder, } } + def processModule( + code: String, + codeSource: CodeSource, + autoImport: Boolean, + extraCode: String, + hardcoded: Boolean, + moduleCodeWrapper: CodeWrapper = scriptCodeWrapper + ): Res[ScriptOutput.Metadata] = synchronized { - def processModule(code: String, - codeSource: CodeSource, - autoImport: Boolean, - extraCode: String, - hardcoded: Boolean, - moduleCodeWrapper: CodeWrapper = scriptCodeWrapper) - : Res[ScriptOutput.Metadata] = synchronized{ - - alreadyLoadedFiles.get(codeSource) match{ + alreadyLoadedFiles.get(codeSource) match { case Some(x) => Res.Success(x) case None => val tag = Tag( @@ -365,13 +373,11 @@ class Interpreter(val compilerBuilder: CompilerBuilder, classPathWhitelist.hashCode().toString ) - val cachedScriptData = storage.classFilesListLoad( os.sub / codeSource.filePathPrefix, tag ) - // Lazy, because we may not always need this if the script is already cached // and none of it's blocks end up needing to be re-compiled. 
We don't know up // front if any blocks will need re-compilation, because it may import $file @@ -381,7 +387,7 @@ class Interpreter(val compilerBuilder: CompilerBuilder, codeSource.printablePath ) - for{ + for { blocks <- cachedScriptData match { case None => Res(splittedScript).map(_.map(_ => None)) case Some(scriptOutput) => @@ -415,83 +421,83 @@ class Interpreter(val compilerBuilder: CompilerBuilder, } } - def processExec(code: String, - currentLine: Int, - incrementLine: () => Unit): Res[Imports] = synchronized{ - val wrapperName = Name(wrapperNamePrefix + currentLine) - val fileName = wrapperName.encoded + ".sc" - for { - blocks <- Res(parser().splitScript(Interpreter.skipSheBangLine(code), fileName)) - - metadata <- processAllScriptBlocks( - blocks.map(_ => None), - Res.Success(blocks), - predefImports ++ frameImports, - CodeSource( - wrapperName, - Seq(), - Seq(Name("ammonite"), Name("$sess")), - Some(wd/"(console)") - ), - (processed, indexedWrapperName) => - evaluateLine(processed, fileName, indexedWrapperName, false, incrementLine), - autoImport = true, - "" - ) - } yield { - metadata.blockInfo.last.finalImports + def processExec(code: String, currentLine: Int, incrementLine: () => Unit): Res[Imports] = + synchronized { + val wrapperName = Name(wrapperNamePrefix + currentLine) + val fileName = wrapperName.encoded + ".sc" + for { + blocks <- Res(parser().splitScript(Interpreter.skipSheBangLine(code), fileName)) + + metadata <- processAllScriptBlocks( + blocks.map(_ => None), + Res.Success(blocks), + predefImports ++ frameImports, + CodeSource( + wrapperName, + Seq(), + Seq(Name("ammonite"), Name("$sess")), + Some(wd / "(console)") + ), + (processed, indexedWrapperName) => + evaluateLine(processed, fileName, indexedWrapperName, false, incrementLine), + autoImport = true, + "" + ) + } yield { + metadata.blockInfo.last.finalImports + } } - } - type BlockData = Option[(ClassFiles, ScriptOutput.BlockMetadata)] - - def processAllScriptBlocks(blocks: Seq[BlockData], - splittedScript: => Res[IndexedSeq[(String, Seq[String])]], - startingImports: Imports, - codeSource: CodeSource, - evaluate: (Preprocessor.Output, Name) => Res[(Evaluated, Tag)], - autoImport: Boolean, - extraCode: String): Res[ScriptOutput.Metadata] = synchronized{ + def processAllScriptBlocks( + blocks: Seq[BlockData], + splittedScript: => Res[IndexedSeq[(String, Seq[String])]], + startingImports: Imports, + codeSource: CodeSource, + evaluate: (Preprocessor.Output, Name) => Res[(Evaluated, Tag)], + autoImport: Boolean, + extraCode: String + ): Res[ScriptOutput.Metadata] = synchronized { // we store the old value, because we will reassign this in the loop val outerScriptImportCallback = scriptImportCallback /** - * Iterate over the blocks of a script keeping track of imports. - * - * We keep track of *both* the `scriptImports` as well as the `lastImports` - * because we want to be able to make use of any import generated in the - * script within its blocks, but at the end we only want to expose the - * imports generated by the last block to who-ever loaded the script - * - * @param blocks the compilation block of the script, separated by `@`s. 
- * Each one is a tuple containing the leading whitespace and - * a sequence of statements in that block - * - * @param scriptImports the set of imports that apply to the current - * compilation block, excluding that of the last - * block that was processed since that is held - * separately in `lastImports` and treated - * specially - * - * @param lastImports the imports created by the last block that was processed; - * only imports created by that - * - * @param wrapperIndex a counter providing the index of the current block, so - * e.g. if `Foo.sc` has multiple blocks they can be named - * `Foo_1` `Foo_2` etc. - * - * @param perBlockMetadata an accumulator for the processed metadata of each block - * that is fed in - */ - @tailrec def loop(blocks: Seq[BlockData], - scriptImports: Imports, - lastImports: Imports, - wrapperIndex: Int, - perBlockMetadata: List[ScriptOutput.BlockMetadata]) - : Res[ScriptOutput.Metadata] = { + * Iterate over the blocks of a script keeping track of imports. + * + * We keep track of *both* the `scriptImports` as well as the `lastImports` + * because we want to be able to make use of any import generated in the + * script within its blocks, but at the end we only want to expose the + * imports generated by the last block to who-ever loaded the script + * + * @param blocks the compilation block of the script, separated by `@`s. + * Each one is a tuple containing the leading whitespace and + * a sequence of statements in that block + * + * @param scriptImports the set of imports that apply to the current + * compilation block, excluding that of the last + * block that was processed since that is held + * separately in `lastImports` and treated + * specially + * + * @param lastImports the imports created by the last block that was processed; + * only imports created by that + * + * @param wrapperIndex a counter providing the index of the current block, so + * e.g. if `Foo.sc` has multiple blocks they can be named + * `Foo_1` `Foo_2` etc. 
+ * + * @param perBlockMetadata an accumulator for the processed metadata of each block + * that is fed in + */ + @tailrec def loop( + blocks: Seq[BlockData], + scriptImports: Imports, + lastImports: Imports, + wrapperIndex: Int, + perBlockMetadata: List[ScriptOutput.BlockMetadata] + ): Res[ScriptOutput.Metadata] = { if (blocks.isEmpty) { // No more blocks // if we have imports to pass to the upper layer we do that @@ -508,11 +514,10 @@ class Interpreter(val compilerBuilder: CompilerBuilder, // pretty printing results is disabled for scripts val indexedWrapperName = Interpreter.indexWrapperName(codeSource.wrapperName, wrapperIndex) - def compileRunBlock(leadingSpaces: String, hookInfo: ImportHookInfo) = { - val printSuffix = if (wrapperIndex == 1) "" else " #" + wrapperIndex + val printSuffix = if (wrapperIndex == 1) "" else " #" + wrapperIndex printer.info("Compiling " + codeSource.printablePath + printSuffix) - for{ + for { processed <- compilerManager.preprocess(codeSource.fileName).transform( hookInfo.stmts, "", @@ -536,9 +541,7 @@ class Interpreter(val compilerBuilder: CompilerBuilder, ) } - - - val cachedLoaded = for{ + val cachedLoaded = for { (classFiles, blockMetadata) <- blocks.head // We don't care about the results of resolving the import hooks; // Assuming they still *can* be resolved, the `envHash` check will @@ -554,7 +557,7 @@ class Interpreter(val compilerBuilder: CompilerBuilder, val envHash = Interpreter.cacheTag(evalClassloader.classpathHash(codeSource.path)) if (envHash != blockMetadata.id.tag.env) { compileRunBlock(blockMetadata.leadingSpaces, blockMetadata.hookInfo) - } else{ + } else { compilerManager.addToClasspath(classFiles) val cls = eval.loadClass(blockMetadata.id.wrapperPath, classFiles) @@ -566,24 +569,27 @@ class Interpreter(val compilerBuilder: CompilerBuilder, } } - val res = cachedLoaded.getOrElse{ - for{ + val res = cachedLoaded.getOrElse { + for { allSplittedChunks <- splittedScript (leadingSpaces, stmts) = allSplittedChunks(wrapperIndex - 1) (hookStmts, importTrees) = parser().parseImportHooks(codeSource, stmts) hookInfo <- resolveImportHooks( - importTrees, hookStmts, codeSource, scriptCodeWrapper.wrapperPath + importTrees, + hookStmts, + codeSource, + scriptCodeWrapper.wrapperPath ) res <- compileRunBlock(leadingSpaces, hookInfo) } yield res } - res match{ + res match { case Res.Success(blockMetadata) => val last = blockMetadata.hookInfo.imports ++ - blockMetadata.finalImports ++ - nestedScriptImports + blockMetadata.finalImports ++ + nestedScriptImports loop( blocks.tail, @@ -606,10 +612,10 @@ class Interpreter(val compilerBuilder: CompilerBuilder, // Wrapper, Wrapper2, Wrapper3, Wrapper4, ... 
try { - for(res <- loop(blocks, startingImports, Imports(), wrapperIndex = 1, List())) - // We build up `blockInfo` backwards, since it's a `List`, so reverse it - // before giving it to the outside world - yield ScriptOutput.Metadata(res.blockInfo.reverse) + for (res <- loop(blocks, startingImports, Imports(), wrapperIndex = 1, List())) + // We build up `blockInfo` backwards, since it's a `List`, so reverse it + // before giving it to the outside world + yield ScriptOutput.Metadata(res.blockInfo.reverse) } finally scriptImportCallback = outerScriptImportCallback } @@ -639,7 +645,7 @@ class Interpreter(val compilerBuilder: CompilerBuilder, handleClasspath(jar) } def ivy(coordinates: Dependency*): Unit = { - loadIvy(coordinates:_*) match{ + loadIvy(coordinates: _*) match { case Left(failureMsg) => throw new Exception(failureMsg) case Right(loaded) => @@ -651,13 +657,11 @@ class Interpreter(val compilerBuilder: CompilerBuilder, } } - - lazy val interpApi: InterpAPI = new InterpAPI{ outer => - + lazy val interpApi: InterpAPI = new InterpAPI { outer => val colors = parameters.colors def watch(p: os.Path) = interp.watch(p) - def watchValue[T](v: => T): T = {interp.watchValue(v); v} + def watchValue[T](v: => T): T = { interp.watchValue(v); v } val beforeExitHooks = interp.beforeExitHooks @@ -668,7 +672,6 @@ class Interpreter(val compilerBuilder: CompilerBuilder, def handleClasspath(jar: java.net.URL) = headFrame.addClasspath(Seq(jar)) - def module(file: os.Path) = { watch(file) val (pkg, wrapper) = ammonite.util.Util.pathToPackageWrapper( @@ -681,15 +684,15 @@ class Interpreter(val compilerBuilder: CompilerBuilder, wrapper, pkg, Seq(Name("ammonite"), Name("$file")), - Some(wd/"Main.sc") + Some(wd / "Main.sc") ), autoImport = true, extraCode = "", hardcoded = false - ) match{ + ) match { case Res.Failure(s) => throw new CompilationError(s) case Res.Exception(t, s) => throw t - case x => //println(x) + case x => // println(x) } } @@ -706,31 +709,32 @@ class Interpreter(val compilerBuilder: CompilerBuilder, } -object Interpreter{ +object Interpreter { - /** @param wrapperNamePrefix - * Name to be used as a prefix for source file and classes wrapping user code, that ends in - * compilation errors or stack traces in particular - */ + /** + * @param wrapperNamePrefix + * Name to be used as a prefix for source file and classes wrapping user code, that ends in + * compilation errors or stack traces in particular + */ case class Parameters( - printer: Printer = Printer( - System.out, - System.err, - System.out, - System.err.println, - System.err.println, - System.err.println - ), - storage: Storage = Storage.InMemory(), - wd: os.Path = os.pwd, - colors: Ref[Colors] = Ref(Colors.Default), - verboseOutput: Boolean = true, - initialClassLoader: ClassLoader = null, - importHooks: Map[Seq[String], ImportHook] = ImportHook.defaults, - alreadyLoadedDependencies: Seq[Dependency] = Nil, - classPathWhitelist: Set[Seq[String]] = Set.empty, - wrapperNamePrefix: String = "cmd", - warnings: Boolean = false + printer: Printer = Printer( + System.out, + System.err, + System.out, + System.err.println, + System.err.println, + System.err.println + ), + storage: Storage = Storage.InMemory(), + wd: os.Path = os.pwd, + colors: Ref[Colors] = Ref(Colors.Default), + verboseOutput: Boolean = true, + initialClassLoader: ClassLoader = null, + importHooks: Map[Seq[String], ImportHook] = ImportHook.defaults, + alreadyLoadedDependencies: Seq[Dependency] = Nil, + classPathWhitelist: Set[Seq[String]] = Set.empty, + wrapperNamePrefix: 
String = "cmd", + warnings: Boolean = false ) val predefImports = Imports( @@ -752,19 +756,15 @@ object Interpreter{ ImportData("ammonite.repl.ReplBridge.value.codeColorsImplicit") ) - - val SheBang = "#!" val SheBangEndPattern = Pattern.compile(s"""((?m)^!#.*)$newLine""") - - /** - * This gives our cache tags for compile caching. The cache tags are a hash - * of classpath, previous commands (in-same-script), and the block-code. - * Previous commands are hashed in the wrapper names, which are contained - * in imports, so we don't need to pass them explicitly. - */ + * This gives our cache tags for compile caching. The cache tags are a hash + * of classpath, previous commands (in-same-script), and the block-code. + * Previous commands are hashed in the wrapper names, which are contained + * in imports, so we don't need to pass them explicitly. + */ def cacheTag(classpathHash: Array[Byte]): String = { val bytes = ammonite.util.Util.md5Hash(Iterator( classpathHash @@ -774,10 +774,11 @@ object Interpreter{ def skipSheBangLine(code: String) = { val newLineLength = newLine.length + /** - * the skipMultipleLines function is necessary to support the parsing of - * multiple shebang lines. The NixOs nix-shell normally uses 2+ shebang lines. - */ + * the skipMultipleLines function is necessary to support the parsing of + * multiple shebang lines. The NixOs nix-shell normally uses 2+ shebang lines. + */ def skipMultipleLines(ind: Int = 0): Int = { val index = code.indexOf('\n', ind) if (code.substring(index + 1).startsWith(SheBang)) @@ -788,7 +789,7 @@ object Interpreter{ if (code.startsWith(SheBang)) { val matcher = SheBangEndPattern matcher code val shebangEnd = if (matcher.find) matcher.end else skipMultipleLines() - val numberOfStrippedLines = newLine.r.findAllMatchIn( code.substring(0, shebangEnd) ).length + val numberOfStrippedLines = newLine.r.findAllMatchIn(code.substring(0, shebangEnd)).length (newLine * numberOfStrippedLines) + code.substring(shebangEnd) } else code @@ -798,10 +799,12 @@ object Interpreter{ Name(wrapperName.raw + (if (wrapperIndex == 1) "" else "_" + wrapperIndex)) } - def initPrinters(colors0: Colors, - output: OutputStream, - error: OutputStream, - verboseOutput: Boolean) = { + def initPrinters( + colors0: Colors, + output: OutputStream, + error: OutputStream, + verboseOutput: Boolean + ) = { val colors = Ref[Colors](colors0) val printStream = new PrintStream(output, true) val errorPrintStream = new PrintStream(error, true) diff --git a/amm/interp/src/main/scala/ammonite/interp/IvyThing.scala b/amm/interp/src/main/scala/ammonite/interp/IvyThing.scala index 9439da320..ddd0b4f88 100644 --- a/amm/interp/src/main/scala/ammonite/interp/IvyThing.scala +++ b/amm/interp/src/main/scala/ammonite/interp/IvyThing.scala @@ -7,11 +7,10 @@ import coursierapi.{Cache, Dependency, Fetch, Logger, Repository} import scala.collection.JavaConverters._ import scala.util.Try - -object IvyThing{ +object IvyThing { def completer( - repositories: Seq[Repository], - verbose: Boolean + repositories: Seq[Repository], + verbose: Boolean ): String => (Int, Seq[String]) = { val cache = Cache.create() .withLogger(if (verbose) Logger.progressBars() else Logger.nop) @@ -27,11 +26,13 @@ object IvyThing{ .complete() (res.getFrom, res.getCompletions.asScala.toVector) } - def resolveArtifact(repositories: Seq[Repository], - dependencies: Seq[Dependency], - verbose: Boolean, - output: PrintStream, - hooks: Seq[Fetch => Fetch]) = synchronized { + def resolveArtifact( + repositories: Seq[Repository], + 
dependencies: Seq[Dependency], + verbose: Boolean, + output: PrintStream, + hooks: Seq[Fetch => Fetch] + ) = synchronized { val fetch = Fetch.create() .addDependencies(dependencies: _*) .withRepositories(repositories: _*) @@ -51,8 +52,8 @@ object IvyThing{ def noChangingArtifact = res.getArtifacts.asScala.forall(!_.getKey.isChanging) def noVersionInterval = dependencies.map(_.getVersion).forall { v => !v.startsWith("latest.") && - !v.exists(Set('[', ']', '(', ')')) && - !v.endsWith("+") + !v.exists(Set('[', ']', '(', ')')) && + !v.endsWith("+") } val files = res.getFiles.asScala.toList Right((!customParams && noChangingArtifact && noVersionInterval, files)) @@ -62,4 +63,3 @@ object IvyThing{ val defaultRepositories = Repository.defaults().asScala.toList } - diff --git a/amm/interp/src/main/scala/ammonite/interp/PredefInitialization.scala b/amm/interp/src/main/scala/ammonite/interp/PredefInitialization.scala index cc734cbc7..b147ca85e 100644 --- a/amm/interp/src/main/scala/ammonite/interp/PredefInitialization.scala +++ b/amm/interp/src/main/scala/ammonite/interp/PredefInitialization.scala @@ -1,6 +1,5 @@ package ammonite.interp - import ammonite.interp.api.InterpAPI import ammonite.runtime.{SpecialClassLoader, Storage} import ammonite.util.ScriptOutput.Metadata @@ -8,13 +7,11 @@ import ammonite.util.{ImportData, Imports, Name, PredefInfo, Res} import ammonite.util.Util.CodeSource /** - * The logic around executing an [[Interpreter]]'s predef during - * initialization - */ + * The logic around executing an [[Interpreter]]'s predef during + * initialization + */ object PredefInitialization { - def initBridge[T >: Null <: AnyRef](classloader: SpecialClassLoader, - name: String, - t: T) = { + def initBridge[T >: Null <: AnyRef](classloader: SpecialClassLoader, name: String, t: T) = { classloader.findClassPublic(name + "$") classloader.findClassPublic(name) .getDeclaredMethods @@ -38,33 +35,36 @@ object PredefInitialization { allImports.foldLeft(Imports())(_ ++ _) } - def initBridges(bridges: Seq[(String, String, AnyRef)], - evalClassloader: SpecialClassLoader): Imports = { + def initBridges( + bridges: Seq[(String, String, AnyRef)], + evalClassloader: SpecialClassLoader + ): Imports = { for ((name, shortName, bridge) <- bridges) initBridge(evalClassloader, name, bridge) initBridges(bridges.map { case (name, shortName, _) => (name, shortName) }) } - def apply(interpApi: InterpAPI, - storage: Storage, - basePredefs: Seq[PredefInfo], - customPredefs: Seq[PredefInfo], - processModule: (String, CodeSource, Boolean) => Res[Metadata], - addImports: Imports => Unit, - watch: os.Path => Unit): Res[_] = { + def apply( + interpApi: InterpAPI, + storage: Storage, + basePredefs: Seq[PredefInfo], + customPredefs: Seq[PredefInfo], + processModule: (String, CodeSource, Boolean) => Res[Metadata], + addImports: Imports => Unit, + watch: os.Path => Unit + ): Res[_] = { val predefs = { basePredefs ++ - storage.loadPredef.map{ - case (code, path) => - PredefInfo(Name(path.last.stripSuffix(".sc")), code, false, Some(path)) - } ++ - customPredefs + storage.loadPredef.map { + case (code, path) => + PredefInfo(Name(path.last.stripSuffix(".sc")), code, false, Some(path)) + } ++ + customPredefs } - - Res.fold((), predefs){(_, predefInfo) => + Res.fold((), predefs) { (_, predefInfo) => predefInfo.path.foreach(watch) if (predefInfo.code.isEmpty) Res.Success(()) else { @@ -77,7 +77,7 @@ object PredefInitialization { predefInfo.path ), predefInfo.hardcoded - ) match{ + ) match { case Res.Skip => Res.Success(()) case 
Res.Success(processed) => addImports(processed.blockInfo.last.hookInfo.imports) diff --git a/amm/interp/src/main/scala/ammonite/interp/Watchable.scala b/amm/interp/src/main/scala/ammonite/interp/Watchable.scala index bc2ed10ca..3aa745364 100644 --- a/amm/interp/src/main/scala/ammonite/interp/Watchable.scala +++ b/amm/interp/src/main/scala/ammonite/interp/Watchable.scala @@ -1,27 +1,29 @@ package ammonite.interp - trait Watchable { def poll(): Long } -object Watchable{ +object Watchable { def mtimeIfExists(p: os.Path) = if (os.exists(p)) os.mtime(p) else 0L + /** - * Recursively mtimes things, with the sole purpose of providing a number - * that will change if that file changes or that folder's contents changes - * - * Ensure we include the file paths within a folder as part of the folder - * signature, as file moves often do not update the mtime but we want to - * trigger a "something changed" event anyway - */ + * Recursively mtimes things, with the sole purpose of providing a number + * that will change if that file changes or that folder's contents changes + * + * Ensure we include the file paths within a folder as part of the folder + * signature, as file moves often do not update the mtime but we want to + * trigger a "something changed" event anyway + */ def pathSignature(p: os.Path) = if (!os.exists(p)) 0L - else try { - if (os.isDir(p)) os.walk(p).map(x => x.hashCode + mtimeIfExists(x)).sum - else os.mtime(p) - } catch { case e: java.nio.file.NoSuchFileException => - 0L - } + else + try { + if (os.isDir(p)) os.walk(p).map(x => x.hashCode + mtimeIfExists(x)).sum + else os.mtime(p) + } catch { + case e: java.nio.file.NoSuchFileException => + 0L + } case class Path(p: os.Path) extends Watchable { def poll() = pathSignature(p) } diff --git a/amm/interp/src/main/scala/ammonite/interp/script/AmmoniteBuildServer.scala b/amm/interp/src/main/scala/ammonite/interp/script/AmmoniteBuildServer.scala index d5865786c..e52dc5b1f 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/AmmoniteBuildServer.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/AmmoniteBuildServer.scala @@ -22,13 +22,13 @@ import scala.util.{Failure, Success} import scala.util.control.NonFatal class AmmoniteBuildServer( - compilerBuilder: CompilerBuilder, - parser: Parser, - codeWrapper: CodeWrapper, - initialScripts: Seq[os.Path] = Nil, - initialImports: Imports = AmmoniteBuildServer.defaultImports, - defaultRepositories: Seq[Repository] = Repository.defaults().asScala.toList, - importHooks: Map[Seq[String], ImportHook] = ImportHook.defaults + compilerBuilder: CompilerBuilder, + parser: Parser, + codeWrapper: CodeWrapper, + initialScripts: Seq[os.Path] = Nil, + initialImports: Imports = AmmoniteBuildServer.defaultImports, + defaultRepositories: Seq[Repository] = Repository.defaults().asScala.toList, + importHooks: Map[Seq[String], ImportHook] = ImportHook.defaults ) extends BuildServer with ScalaBuildServer with DummyBuildServerImplems { import AmmoniteBuildServer._ @@ -37,8 +37,12 @@ class AmmoniteBuildServer( private val printer = { val printStream = new PrintStream(System.out) Printer( - printStream, new PrintStream(System.err), printStream, - println, println, println + printStream, + new PrintStream(System.err), + printStream, + println, + println, + println ) } @@ -49,7 +53,6 @@ class AmmoniteBuildServer( verboseOutput = false ) - private val initialClassLoader = if (isolatedApi) classOf[InterpAPI].getClassLoader @@ -76,7 +79,7 @@ class AmmoniteBuildServer( root, importHooks ) - } + } private lazy 
val compiler = withRoot { root => new ScriptCompiler( @@ -209,7 +212,7 @@ class AmmoniteBuildServer( catch { case NonFatal(e) => System.err.println(s"Caught $e") - // FIXME Log this + // FIXME Log this } } } yield scriptBuildTarget(script, path) @@ -228,8 +231,8 @@ class AmmoniteBuildServer( } private def scriptDependencySources( - script: Script, - target: BuildTargetIdentifier + script: Script, + target: BuildTargetIdentifier ): DependencySourcesItem = { val extra = initialClassPath.filter(_.toASCIIString.endsWith("-sources.jar")) // meh val jars = proc.jarDependencies(script) match { @@ -245,7 +248,7 @@ class AmmoniteBuildServer( } def buildTargetDependencySources( - params: DependencySourcesParams + params: DependencySourcesParams ): CompletableFuture[DependencySourcesResult] = on(resolutionEc) { val items = for { @@ -256,7 +259,7 @@ class AmmoniteBuildServer( } def buildTargetInverseSources( - params: InverseSourcesParams + params: InverseSourcesParams ): CompletableFuture[InverseSourcesResult] = nonBlocking { val uri = params.getTextDocument.getUri @@ -268,10 +271,10 @@ class AmmoniteBuildServer( } private def sendDiagnostics( - client: BuildClient, - mod0: Script, - target: BuildTargetIdentifier, - diagnostics: Seq[Diagnostic] + client: BuildClient, + mod0: Script, + target: BuildTargetIdentifier, + diagnostics: Seq[Diagnostic] ): Unit = { def bspDiagnostic(diagnostic: Diagnostic): BDiagnostic = { @@ -323,11 +326,11 @@ class AmmoniteBuildServer( } private def finishCompiling( - taskId: TaskId, - path: String, - target: BuildTargetIdentifier, - success: Boolean, - diagnostics: Iterable[Diagnostic] + taskId: TaskId, + path: String, + target: BuildTargetIdentifier, + success: Boolean, + diagnostics: Iterable[Diagnostic] ): Unit = for (client <- clientOpt) { @@ -351,8 +354,8 @@ class AmmoniteBuildServer( } private def compileScript( - script: Script, - dependencies: Script.ResolvedDependencies + script: Script, + dependencies: Script.ResolvedDependencies ): ScriptCompileResult = { def actualDiagnostics(result: ScriptCompileResult) = @@ -410,8 +413,8 @@ class AmmoniteBuildServer( } private def scriptScalacOptions( - script: Script, - target: BuildTargetIdentifier + script: Script, + target: BuildTargetIdentifier ): ScalacOptionsItem = { val extra = initialClassPath.filter(!_.toASCIIString.endsWith("-sources.jar")) // meh val scriptDependenciesTargets = proc.dependencies(script) match { @@ -442,7 +445,7 @@ class AmmoniteBuildServer( } def buildTargetScalacOptions( - params: ScalacOptionsParams + params: ScalacOptionsParams ): CompletableFuture[ScalacOptionsResult] = on(resolutionEc) { val items = for { @@ -510,9 +513,9 @@ object AmmoniteBuildServer { } private def alreadyLoadedDependencies( - resourceName: String = - if (isolatedApi) "amm-interp-api-dependencies.txt" - else "amm-dependencies.txt" + resourceName: String = + if (isolatedApi) "amm-interp-api-dependencies.txt" + else "amm-dependencies.txt" ): Seq[Dependency] = { var is: InputStream = null @@ -525,12 +528,14 @@ object AmmoniteBuildServer { .mkString .split('\n') .filter(_.nonEmpty) - .map(l => l.split(':') match { - case Array(org, name, ver) => - Dependency.of(org, name, ver) - case other => - throw new Exception(s"Cannot parse line '$other' from resource $resourceName") - }) + .map(l => + l.split(':') match { + case Array(org, name, ver) => + Dependency.of(org, name, ver) + case other => + throw new Exception(s"Cannot parse line '$other' from resource $resourceName") + } + ) } finally { if (is != null) is.close() @@ -547,7 
+552,7 @@ object AmmoniteBuildServer { .toSet private def naiveJavaFutureToScalaFuture[T]( - f: java.util.concurrent.Future[T] + f: java.util.concurrent.Future[T] ): Future[T] = { val p = Promise[T]() val t = new Thread { @@ -564,9 +569,9 @@ object AmmoniteBuildServer { } def start( - server: AmmoniteBuildServer, - input: InputStream = System.in, - output: OutputStream = System.out + server: AmmoniteBuildServer, + input: InputStream = System.in, + output: OutputStream = System.out ): (Launcher[BuildClient], Future[Unit]) = { val ec = Executors.newFixedThreadPool(4, threadFactory("ammonite-bsp-jsonrpc")) val launcher = new Launcher.Builder[BuildClient]() diff --git a/amm/interp/src/main/scala/ammonite/interp/script/Diagnostic.scala b/amm/interp/src/main/scala/ammonite/interp/script/Diagnostic.scala index d50d3f6a6..be9eefa47 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/Diagnostic.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/Diagnostic.scala @@ -3,8 +3,8 @@ package ammonite.interp.script import ammonite.util.Position final case class Diagnostic( - severity: String, - start: Position, - end: Position, - message: String + severity: String, + start: Position, + end: Position, + message: String ) diff --git a/amm/interp/src/main/scala/ammonite/interp/script/DummyBuildServerImplems.scala b/amm/interp/src/main/scala/ammonite/interp/script/DummyBuildServerImplems.scala index dfc7ab9f2..615f09dc9 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/DummyBuildServerImplems.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/DummyBuildServerImplems.scala @@ -5,10 +5,12 @@ import ch.epfl.scala.bsp4j._ import scala.collection.JavaConverters._ private[script] trait DummyBuildServerImplems extends BuildServer with ScalaBuildServer { - override def buildTargetDependencyModules(dmp: DependencyModulesParams): CompletableFuture[DependencyModulesResult] = + override def buildTargetDependencyModules(dmp: DependencyModulesParams) + : CompletableFuture[DependencyModulesResult] = CompletableFuture.completedFuture(new DependencyModulesResult(List.empty.asJava)) - override def buildTargetOutputPaths(opp: OutputPathsParams): CompletableFuture[OutputPathsResult] = + override def buildTargetOutputPaths(opp: OutputPathsParams) + : CompletableFuture[OutputPathsResult] = CompletableFuture.completedFuture(new OutputPathsResult(List.empty.asJava)) override def buildTargetResources(params: ResourcesParams): CompletableFuture[ResourcesResult] = { @@ -32,7 +34,7 @@ private[script] trait DummyBuildServerImplems extends BuildServer with ScalaBuil } override def buildTargetScalaMainClasses( - params: ScalaMainClassesParams + params: ScalaMainClassesParams ): CompletableFuture[ScalaMainClassesResult] = { val items = params.getTargets.asScala.map { target => new ScalaMainClassesItem(target, List.empty[ScalaMainClass].asJava) @@ -42,7 +44,7 @@ private[script] trait DummyBuildServerImplems extends BuildServer with ScalaBuil } override def buildTargetScalaTestClasses( - params: ScalaTestClassesParams + params: ScalaTestClassesParams ): CompletableFuture[ScalaTestClassesResult] = { val items = params.getTargets.asScala.map { target => new ScalaTestClassesItem(target, List.empty[String].asJava) diff --git a/amm/interp/src/main/scala/ammonite/interp/script/Script.scala b/amm/interp/src/main/scala/ammonite/interp/script/Script.scala index c59fd5043..16fe1370f 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/Script.scala +++ 
b/amm/interp/src/main/scala/ammonite/interp/script/Script.scala @@ -6,10 +6,10 @@ import ammonite.runtime.ImportHook import ammonite.util.Name final case class Script( - code: String, - codeSource: CodeSource, - blocks: Seq[Script.Block], - processorDiagnostics: Seq[Diagnostic] + code: String, + codeSource: CodeSource, + blocks: Seq[Script.Block], + processorDiagnostics: Seq[Diagnostic] ) { lazy val dependencyImports: Imports = { @@ -40,19 +40,19 @@ final case class Script( object Script { final case class Import( - code: Either[String, os.Path], - isExec: Boolean, - codeSource: CodeSource, - hookImports: Imports + code: Either[String, os.Path], + isExec: Boolean, + codeSource: CodeSource, + hookImports: Imports ) final case class Dependencies( - scriptDependencies: Seq[Script.Import] = Nil, - dependencies: Seq[coursierapi.Dependency] = Nil, - jarDependencies: Seq[os.Path] = Nil, - pluginDependencies: Seq[coursierapi.Dependency] = Nil, - jarPluginDependencies: Seq[os.Path] = Nil, - extraRepositories: Seq[coursierapi.Repository] = Nil + scriptDependencies: Seq[Script.Import] = Nil, + dependencies: Seq[coursierapi.Dependency] = Nil, + jarDependencies: Seq[os.Path] = Nil, + pluginDependencies: Seq[coursierapi.Dependency] = Nil, + jarPluginDependencies: Seq[os.Path] = Nil, + extraRepositories: Seq[coursierapi.Repository] = Nil ) { def +(other: Dependencies): Dependencies = Dependencies( @@ -75,7 +75,7 @@ object Script { } final case class Options( - extraScalacOptions: Seq[String] = Nil + extraScalacOptions: Seq[String] = Nil ) { def +(other: Options): Options = Options( @@ -88,16 +88,16 @@ object Script { } final case class Block( - startIdx: Int, - leadingSpaces: String, - statements: Seq[String], - imports: Seq[ImportHook.Result] + startIdx: Int, + leadingSpaces: String, + statements: Seq[String], + imports: Seq[ImportHook.Result] ) final case class ResolvedDependencies( - jars: Seq[os.Path], - pluginJars: Seq[os.Path], - byteCode: Seq[(String, Array[Byte])] + jars: Seq[os.Path], + pluginJars: Seq[os.Path], + byteCode: Seq[(String, Array[Byte])] ) private def dependencies(hookRes: ImportHook.Result): Dependencies = diff --git a/amm/interp/src/main/scala/ammonite/interp/script/ScriptCache.scala b/amm/interp/src/main/scala/ammonite/interp/script/ScriptCache.scala index aad4ecf2b..18f8dfc4b 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/ScriptCache.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/ScriptCache.scala @@ -9,8 +9,8 @@ import ch.epfl.scala.bsp4j.{BuildTargetEvent, BuildTargetEventKind, BuildTargetI import scala.collection.JavaConverters._ final class ScriptCache( - proc: ScriptProcessor, - onBuildTargetEvents: Seq[BuildTargetEvent] => Unit + proc: ScriptProcessor, + onBuildTargetEvents: Seq[BuildTargetEvent] => Unit ) { private val cache = new ConcurrentHashMap[String, Script] @@ -67,7 +67,7 @@ final class ScriptCache( previousOpt = Option(cache.put(id, script)) if previousOpt.forall { newScript => newScript.dependencies != script.dependencies || - newScript.options != script.options + newScript.options != script.options } } yield { val event = new BuildTargetEvent(new BuildTargetIdentifier(id)) diff --git a/amm/interp/src/main/scala/ammonite/interp/script/ScriptCompileResult.scala b/amm/interp/src/main/scala/ammonite/interp/script/ScriptCompileResult.scala index f64c57b3a..34b7bb76d 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/ScriptCompileResult.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/ScriptCompileResult.scala @@ 
-3,6 +3,6 @@ package ammonite.interp.script import ammonite.compiler.iface.Compiler.Output final case class ScriptCompileResult( - diagnostics: Seq[Diagnostic], - errorOrOutput: Either[String, Seq[Output]] + diagnostics: Seq[Diagnostic], + errorOrOutput: Either[String, Seq[Output]] ) diff --git a/amm/interp/src/main/scala/ammonite/interp/script/ScriptCompiler.scala b/amm/interp/src/main/scala/ammonite/interp/script/ScriptCompiler.scala index 1aabc2852..4bbf9971e 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/ScriptCompiler.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/ScriptCompiler.scala @@ -10,17 +10,17 @@ import scala.collection.JavaConverters._ import scala.collection.mutable final class ScriptCompiler( - compilerBuilder: CompilerBuilder, - storage: Storage, - printer: Printer, // TODO Remove this - codeWrapper: CodeWrapper, - initialClassLoader: ClassLoader, - initialImports: Imports, - classPathWhitelist: Set[Seq[String]], - wd: Option[os.Path], - outputDirectory: Option[os.Path], - generateSemanticDbs: Boolean, - inMemoryCache: Boolean + compilerBuilder: CompilerBuilder, + storage: Storage, + printer: Printer, // TODO Remove this + codeWrapper: CodeWrapper, + initialClassLoader: ClassLoader, + initialImports: Imports, + classPathWhitelist: Set[Seq[String]], + wd: Option[os.Path], + outputDirectory: Option[os.Path], + generateSemanticDbs: Boolean, + inMemoryCache: Boolean ) { import ScriptProcessor.SeqOps @@ -29,12 +29,12 @@ final class ScriptCompiler( /** Compiles a script, along with its dependencies */ def compile( - module: Script, - processor: ScriptProcessor, - doCompile: (Script, Script.ResolvedDependencies) => ScriptCompileResult = compile(_, _) + module: Script, + processor: ScriptProcessor, + doCompile: (Script, Script.ResolvedDependencies) => ScriptCompileResult = compile(_, _) ): ( - Map[Script, Seq[Diagnostic]], - Either[String, Seq[AmmCompiler.Output]] + Map[Script, Seq[Diagnostic]], + Either[String, Seq[AmmCompiler.Output]] ) = { val diagnostics = new mutable.HashMap[Script, Seq[Diagnostic]] @@ -76,8 +76,8 @@ final class ScriptCompiler( * written on disk. */ def compile( - module: Script, - dependencies: Script.ResolvedDependencies + module: Script, + dependencies: Script.ResolvedDependencies ): ScriptCompileResult = compileIfNeeded(moduleSettings(module), module, dependencies) @@ -85,8 +85,8 @@ final class ScriptCompiler( * Reads compilation output from cache. 
*/ def compileFromCache( - script: Script, - dependencies: Script.ResolvedDependencies + script: Script, + dependencies: Script.ResolvedDependencies ): Option[ScriptCompileResult] = { val settingsArgs = moduleSettings(script) compileFromCache(settingsArgs, script, dependencies) @@ -149,9 +149,9 @@ final class ScriptCompiler( } private final case class InMemoryCacheKey( - settings: Seq[String], - script: Script, - dependencies: Script.ResolvedDependencies + settings: Seq[String], + script: Script, + dependencies: Script.ResolvedDependencies ) { def stale: Boolean = script.codeSource.path.exists { path => @@ -178,14 +178,14 @@ final class ScriptCompiler( try os.remove.all(dir) catch { case _: java.nio.file.DirectoryNotEmptyException => - // Can happen on Windows, if any of the file we try to delete is opened elsewhere + // Can happen on Windows, if any of the file we try to delete is opened elsewhere } - } + } private def compileFromCache( - settingsArgs: Seq[String], - script: Script, - dependencies: Script.ResolvedDependencies + settingsArgs: Seq[String], + script: Script, + dependencies: Script.ResolvedDependencies ): Option[ScriptCompileResult] = if (inMemoryCache && script.codeSource.path.nonEmpty) { cleanUpCache() @@ -195,9 +195,9 @@ final class ScriptCompiler( None private def compileIfNeeded( - settingsArgs: Seq[String], - script: Script, - dependencies: Script.ResolvedDependencies + settingsArgs: Seq[String], + script: Script, + dependencies: Script.ResolvedDependencies ): ScriptCompileResult = if (inMemoryCache && script.codeSource.path.nonEmpty) { val key = InMemoryCacheKey(settingsArgs, script, dependencies) @@ -211,9 +211,9 @@ final class ScriptCompiler( doCompile(settingsArgs, script, dependencies) private def doCompile( - settingsArgs: Seq[String], - module: Script, - dependencies: Script.ResolvedDependencies + settingsArgs: Seq[String], + module: Script, + dependencies: Script.ResolvedDependencies ): ScriptCompileResult = { val compiler = new SingleScriptCompiler( diff --git a/amm/interp/src/main/scala/ammonite/interp/script/ScriptProcessor.scala b/amm/interp/src/main/scala/ammonite/interp/script/ScriptProcessor.scala index b046e6494..40f9d4a12 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/ScriptProcessor.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/ScriptProcessor.scala @@ -12,21 +12,21 @@ import coursierapi.{Dependency, Repository} import scala.collection.mutable final case class ScriptProcessor( - scalaVersion: String, - parser: Parser, - codeWrapper: CodeWrapper, - dependencyLoader: DependencyLoader, - defaultRepositories: Seq[Repository], - extraPluginDependencies: Seq[Dependency] = Nil, - wd: os.Path = os.pwd, - importHooks: Map[Seq[String], ImportHook] = ImportHook.defaults + scalaVersion: String, + parser: Parser, + codeWrapper: CodeWrapper, + dependencyLoader: DependencyLoader, + defaultRepositories: Seq[Repository], + extraPluginDependencies: Seq[Dependency] = Nil, + wd: os.Path = os.pwd, + importHooks: Map[Seq[String], ImportHook] = ImportHook.defaults ) { self => import ScriptProcessor._ def load( - code: String, - codeSource: CodeSource + code: String, + codeSource: CodeSource ): Script = { val rawCode = Interpreter.skipSheBangLine(code) @@ -57,9 +57,9 @@ final case class ScriptProcessor( } def hookResults( - hookPrefix: Seq[String], - hook: ImportHook, - tree: ImportTree + hookPrefix: Seq[String], + hook: ImportHook, + tree: ImportTree ): Either[Diagnostic, Seq[ImportHook.Result]] = { val r = hook.handle( codeSource, @@ -189,7 
+189,6 @@ object ScriptProcessor { .map(_.dependencies.copy(scriptDependencies = Nil)) .foldLeft(Script.Dependencies())(_ + _) - private[interp] implicit class SeqOps[T](private val l: Seq[T]) extends AnyVal { def traverse[L, R](f: T => Either[L, R]): Either[Seq[L], Seq[R]] = { val lefts = new mutable.ListBuffer[L] diff --git a/amm/interp/src/main/scala/ammonite/interp/script/SemanticdbProcessor.scala b/amm/interp/src/main/scala/ammonite/interp/script/SemanticdbProcessor.scala index 159fcbc67..8d0135cfe 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/SemanticdbProcessor.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/SemanticdbProcessor.scala @@ -10,12 +10,12 @@ import scala.meta.internal.semanticdb._ object SemanticdbProcessor { def postProcess( - module: Script, - wd: Option[os.Path], - adjust: Int => (Int, Int) => Option[(Int, Int)], - target: os.Path, - originalSource: os.RelPath, - destSource: os.RelPath + module: Script, + wd: Option[os.Path], + adjust: Int => (Int, Int) => Option[(Int, Int)], + target: os.Path, + originalSource: os.RelPath, + destSource: os.RelPath ): Unit = { val semanticDbDir = target / "META-INF" / "semanticdb" val orig = semanticDbDir / @@ -31,12 +31,11 @@ object SemanticdbProcessor { for { (startLine, startChar) <- adjust0(range.startLine, range.startCharacter) (endLine, endChar) <- adjust0(range.endLine, range.endCharacter) - } - yield range - .withStartLine(startLine) - .withStartCharacter(startChar) - .withEndLine(endLine) - .withEndCharacter(endChar) + } yield range + .withStartLine(startLine) + .withStartCharacter(startChar) + .withEndLine(endLine) + .withEndCharacter(endChar) } def updateTrees(trees: Seq[Tree]): Option[Seq[Tree]] = diff --git a/amm/interp/src/main/scala/ammonite/interp/script/SingleScriptCompiler.scala b/amm/interp/src/main/scala/ammonite/interp/script/SingleScriptCompiler.scala index 7d04353a2..381603472 100644 --- a/amm/interp/src/main/scala/ammonite/interp/script/SingleScriptCompiler.scala +++ b/amm/interp/src/main/scala/ammonite/interp/script/SingleScriptCompiler.scala @@ -15,20 +15,20 @@ import scala.collection.mutable * discarded right after having called `apply` or `writeSources`. 
*/ class SingleScriptCompiler( - compilerBuilder: CompilerBuilder, - initialClassLoader: ClassLoader, - storage: Storage, - printer: Printer, - initialImports: Imports, - classPathWhitelist: Set[Seq[String]], - codeWrapper: CodeWrapper, - wd: Option[os.Path], - generateSemanticDbs: Boolean, - settings: Seq[String], - module: Script, - dependencies: Script.ResolvedDependencies, - moduleTarget: Option[os.Path], - moduleSources: Option[os.Path] + compilerBuilder: CompilerBuilder, + initialClassLoader: ClassLoader, + storage: Storage, + printer: Printer, + initialImports: Imports, + classPathWhitelist: Set[Seq[String]], + codeWrapper: CodeWrapper, + wd: Option[os.Path], + generateSemanticDbs: Boolean, + settings: Seq[String], + module: Script, + dependencies: Script.ResolvedDependencies, + moduleTarget: Option[os.Path], + moduleSources: Option[os.Path] ) { private var messages = new mutable.ListBuffer[Diagnostic] @@ -90,7 +90,6 @@ class SingleScriptCompiler( private val offsetToPosSc = PositionOffsetConversion.offsetToPos(module.code) - private def clearByteCodeDir(): Unit = // remove only files from the target directory, not directories // (removing directories can confuse BSP clients with file watchers) @@ -124,7 +123,7 @@ class SingleScriptCompiler( } private def updateSemanticDbs( - blocksOffsetAndCode: Vector[(Int, String)] + blocksOffsetAndCode: Vector[(Int, String)] ): Unit = { def adjust(blockIdx: Int): (Int, Int) => Option[(Int, Int)] = @@ -159,7 +158,7 @@ class SingleScriptCompiler( .pkgName .map(_.raw) .toVector :+ - s"${name.raw}.scala" + s"${name.raw}.scala" ) val destRelPath = os.SubPath(segments0.toVector) @@ -167,11 +166,10 @@ class SingleScriptCompiler( } } - private def compileBlock( - scriptImports: Imports, - block: Script.Block, - blockIdx: Int + scriptImports: Imports, + block: Script.Block, + blockIdx: Int ): Res[(Imports, Int, String, Compiler.Output)] = { val indexedWrapperName = Interpreter.indexWrapperName( diff --git a/amm/repl/api/src/main/scala-2.12/ammonite/repl/api/History.scala b/amm/repl/api/src/main/scala-2.12/ammonite/repl/api/History.scala index 3030b2252..9727e51e3 100644 --- a/amm/repl/api/src/main/scala-2.12/ammonite/repl/api/History.scala +++ b/amm/repl/api/src/main/scala-2.12/ammonite/repl/api/History.scala @@ -3,28 +3,26 @@ package ammonite.repl.api import scala.collection.generic.CanBuildFrom import scala.collection.{IndexedSeqLike, mutable} - class History(s: Vector[String]) -extends IndexedSeq[String] -with IndexedSeqLike[String, History] { + extends IndexedSeq[String] + with IndexedSeqLike[String, History] { def length: Int = s.length def apply(idx: Int): String = s.apply(idx) override def newBuilder = History.builder } -object History{ +object History { def builder = new mutable.Builder[String, History] { val buffer = mutable.Buffer.empty[String] - def +=(elem: String): this.type = {buffer += elem; this} + def +=(elem: String): this.type = { buffer += elem; this } def result(): History = new History(buffer.toVector) def clear(): Unit = buffer.clear() } - implicit def cbf = new CanBuildFrom[History, String, History]{ + implicit def cbf = new CanBuildFrom[History, String, History] { def apply(from: History) = builder def apply() = builder } implicit def toHistory(s: Seq[String]): History = new History(s.toVector) } - diff --git a/amm/repl/api/src/main/scala-2.13-or-3/ammonite/repl/api/History.scala b/amm/repl/api/src/main/scala-2.13-or-3/ammonite/repl/api/History.scala index 43f811c95..abe0b7310 100644 --- 
a/amm/repl/api/src/main/scala-2.13-or-3/ammonite/repl/api/History.scala +++ b/amm/repl/api/src/main/scala-2.13-or-3/ammonite/repl/api/History.scala @@ -3,9 +3,8 @@ package ammonite.repl.api import scala.collection.generic.IsSeq import scala.collection.{Iterable, SeqOps, mutable} - class History(s: Vector[String]) - extends IndexedSeq[String] { + extends IndexedSeq[String] { def length: Int = s.length def apply(idx: Int): String = s.apply(idx) @@ -16,7 +15,7 @@ class History(s: Vector[String]) object History { def builder = new mutable.Builder[String, History] { val buffer = mutable.Buffer.empty[String] - def addOne(elem: String): this.type = {buffer += elem; this} + def addOne(elem: String): this.type = { buffer += elem; this } def result(): History = new History(buffer.toVector) @@ -24,4 +23,3 @@ object History { } implicit def toHistory(s: Seq[String]): History = new History(s.toVector) } - diff --git a/amm/repl/api/src/main/scala-2/ammonite/repl/api/ReplAPIScalaVersionSpecific.scala b/amm/repl/api/src/main/scala-2/ammonite/repl/api/ReplAPIScalaVersionSpecific.scala index 1e798de29..8250fabff 100644 --- a/amm/repl/api/src/main/scala-2/ammonite/repl/api/ReplAPIScalaVersionSpecific.scala +++ b/amm/repl/api/src/main/scala-2/ammonite/repl/api/ReplAPIScalaVersionSpecific.scala @@ -13,7 +13,6 @@ trait ReplAPIScalaVersionSpecific { /** * Get the `Type` object representing the type of `t`. Useful * for finding what its methods are and what you can do with it - * */ def typeOf[T: WeakTypeTag](t: => T): Type diff --git a/amm/repl/api/src/main/scala/ammonite/repl/FullReplAPI.scala b/amm/repl/api/src/main/scala/ammonite/repl/FullReplAPI.scala index 4f9fd8aac..73ec8d3fb 100644 --- a/amm/repl/api/src/main/scala/ammonite/repl/FullReplAPI.scala +++ b/amm/repl/api/src/main/scala/ammonite/repl/FullReplAPI.scala @@ -23,7 +23,6 @@ trait FullReplAPI extends ReplAPI with FullReplAPIScalaVersionSpecific { replApi |description of how to use the REPL, check out https://ammonite.io """.stripMargin.trim - protected[this] def replArgs0: IndexedSeq[Bind[_]] protected[this] def internal0: FullReplAPI.Internal = new FullReplAPI.Internal { @@ -33,9 +32,9 @@ trait FullReplAPI extends ReplAPI with FullReplAPIScalaVersionSpecific { replApi } /** - * This stuff is used for the REPL-generated code that prints things; - * shouldn't really be used by users, but needs to be public and accessible - */ + * This stuff is used for the REPL-generated code that prints things; + * shouldn't really be used by users, but needs to be public and accessible + */ lazy val Internal: FullReplAPI.Internal = internal0 } @@ -48,16 +47,15 @@ object FullReplAPI { def combinePrints(iters: Iterator[String]*) = { iters.toIterator - .filter(_.nonEmpty) - .flatMap(Iterator(newLine) ++ _) - .drop(1) + .filter(_.nonEmpty) + .flatMap(Iterator(newLine) ++ _) + .drop(1) } - def print[T: pprint.TPrint](value: => T, - ident: String, - custom: Option[String]) - (implicit tcolors: pprint.TPrintColors, - classTagT: ClassTag[T] = null) = { + def print[T: pprint.TPrint](value: => T, ident: String, custom: Option[String])(implicit + tcolors: pprint.TPrintColors, + classTagT: ClassTag[T] = null + ) = { // Here we use ClassTag to detect if T is an Unit. // // The default value null suppresses the compilation error when T is a singleton type, @@ -70,7 +68,7 @@ object FullReplAPI { // We don't use WeakTypeTag or TypeTag because those type classes are too heavy-weight, // as Scalac will generate a huge amount of code for creating a TypeTag for refinement types. 
// See https://github.com/lihaoyi/Ammonite/issues/649 for further information. - // + // // We do not check `value == ()`, because that would force evaluation of `value`, which // may be defined as a `lazy val` which the user explicitly does not want to evaluate val isUnit = classTagT == classTag[Unit] @@ -83,8 +81,10 @@ object FullReplAPI { // pretty-printing of the main body val prefix = new pprint.Truncated( Iterator( - colors().ident()(ident).render, ": ", - implicitly[pprint.TPrint[T]].render(tcolors), " = " + colors().ident()(ident).render, + ": ", + implicitly[pprint.TPrint[T]].render(tcolors), + " = " ), pprinter().defaultWidth, pprinter().defaultHeight @@ -108,7 +108,9 @@ object FullReplAPI { } def printDef(definitionLabel: String, ident: String) = { Iterator( - "defined ", colors().`type`()(definitionLabel).render, " ", + "defined ", + colors().`type`()(definitionLabel).render, + " ", colors().ident()(ident).render ) } @@ -119,4 +121,3 @@ object FullReplAPI { } object ReplBridge extends APIHolder[FullReplAPI] - diff --git a/amm/repl/api/src/main/scala/ammonite/repl/api/FrontEnd.scala b/amm/repl/api/src/main/scala/ammonite/repl/api/FrontEnd.scala index f047d861f..3a3370af5 100644 --- a/amm/repl/api/src/main/scala/ammonite/repl/api/FrontEnd.scala +++ b/amm/repl/api/src/main/scala/ammonite/repl/api/FrontEnd.scala @@ -8,12 +8,14 @@ import ammonite.util.{Colors, Res} trait FrontEnd { def width: Int def height: Int - def action(input: InputStream, - reader: java.io.Reader, - output: OutputStream, - prompt: String, - colors: Colors, - compilerComplete: (Int, String) => (Int, Seq[String], Seq[String]), - history: IndexedSeq[String], - addHistory: String => Unit): Res[(String, Seq[String])] + def action( + input: InputStream, + reader: java.io.Reader, + output: OutputStream, + prompt: String, + colors: Colors, + compilerComplete: (Int, String) => (Int, Seq[String], Seq[String]), + history: IndexedSeq[String], + addHistory: String => Unit + ): Res[(String, Seq[String])] } diff --git a/amm/repl/api/src/main/scala/ammonite/repl/api/FrontEndAPI.scala b/amm/repl/api/src/main/scala/ammonite/repl/api/FrontEndAPI.scala index 02121b84c..e1dbc6716 100644 --- a/amm/repl/api/src/main/scala/ammonite/repl/api/FrontEndAPI.scala +++ b/amm/repl/api/src/main/scala/ammonite/repl/api/FrontEndAPI.scala @@ -7,5 +7,3 @@ trait FrontEndAPI { } object FrontEndBridge extends APIHolder[FrontEndAPI] - - diff --git a/amm/repl/api/src/main/scala/ammonite/repl/api/ReplAPI.scala b/amm/repl/api/src/main/scala/ammonite/repl/api/ReplAPI.scala index 1b5e8ad4e..75143a1dc 100644 --- a/amm/repl/api/src/main/scala/ammonite/repl/api/ReplAPI.scala +++ b/amm/repl/api/src/main/scala/ammonite/repl/api/ReplAPI.scala @@ -21,13 +21,13 @@ trait ReplAPI extends ReplAPIScalaVersionSpecific { def help: String /** - * The last exception that was thrown in the REPL; `null` if nothing has - * yet been thrown. Useful if you want additional information from the - * thrown exception than the printed stack trace (e.g. many exceptions have - * additional metadata attached) or if you want to show the stack trace - * on an exception that doesn't normally print it (e.g. seeing the stack - * when a Ctrl-C interrupt happened) via `lastException.printStackTrace`. - */ + * The last exception that was thrown in the REPL; `null` if nothing has + * yet been thrown. Useful if you want additional information from the + * thrown exception than the printed stack trace (e.g. 
many exceptions have + * additional metadata attached) or if you want to show the stack trace + * on an exception that doesn't normally print it (e.g. seeing the stack + * when a Ctrl-C interrupt happened) via `lastException.printStackTrace`. + */ def lastException: Throwable /** @@ -60,25 +60,25 @@ trait ReplAPI extends ReplAPIScalaVersionSpecific { def imports: Imports /** - * If class wrapping is enabled, this lists the names of the previous commands - * that the current commands actually references (as told by the scalac). - * - * E.g. in a session like - * ``` - * @ val n = 2 - * n: Int = 2 - * - * @ val p = 1 - * p: Int = 1 - * - * @ n + p - * res2: Int = 3 - * ``` - * this would have returned an empty list if called from the same line as `val n = 2` - * or `val p = 1`. This would have returned `Seq("cmd0", "cmd1")` if called - * from the same line as `n + p`, as both `cmd0`, that defines `n`, and `cmd1`, that - * defines `p`, are referenced from this line. - */ + * If class wrapping is enabled, this lists the names of the previous commands + * that the current commands actually references (as told by the scalac). + * + * E.g. in a session like + * ``` + * @ val n = 2 + * n: Int = 2 + * + * @ val p = 1 + * p: Int = 1 + * + * @ n + p + * res2: Int = 3 + * ``` + * this would have returned an empty list if called from the same line as `val n = 2` + * or `val p = 1`. This would have returned `Seq("cmd0", "cmd1")` if called + * from the same line as `n + p`, as both `cmd0`, that defines `n`, and `cmd1`, that + * defines `p`, are referenced from this line. + */ def usedEarlierDefinitions: Seq[String] /** @@ -108,26 +108,23 @@ trait ReplAPI extends ReplAPIScalaVersionSpecific { * disables truncation and prints the entire thing, but you can set other * parameters as well if you want. */ - def show(t: Any, - width: Integer = null, - height: Integer = null, - indent: Integer = null): Unit - - /** - * Functions that can be used to manipulate the current REPL session: - * check-pointing progress, reverting to earlier checkpoints, or deleting - * checkpoints by name. - * - * Frames get pushed on a stack; by default, a saved frame is - * accessible simply by calling `load`. If you provide a name - * when `save`ing a checkpoint, it can later be `load`ed directly - * by providing the same name to `load` - * - * Un-named checkpoints are garbage collected, together with their - * classloader and associated data, when they are no longer accessible - * due to `restore`. Named checkpoints are kept forever; call `delete` - * on them if you really want them to go away. - */ + def show(t: Any, width: Integer = null, height: Integer = null, indent: Integer = null): Unit + + /** + * Functions that can be used to manipulate the current REPL session: + * check-pointing progress, reverting to earlier checkpoints, or deleting + * checkpoints by name. + * + * Frames get pushed on a stack; by default, a saved frame is + * accessible simply by calling `load`. If you provide a name + * when `save`ing a checkpoint, it can later be `load`ed directly + * by providing the same name to `load` + * + * Un-named checkpoints are garbage collected, together with their + * classloader and associated data, when they are no longer accessible + * due to `restore`. Named checkpoints are kept forever; call `delete` + * on them if you really want them to go away. 
+ */ def sess: Session def load: ReplLoad @@ -137,69 +134,69 @@ trait ReplAPI extends ReplAPIScalaVersionSpecific { def _compilerManager: ammonite.compiler.iface.CompilerLifecycleManager } -trait ReplLoad{ +trait ReplLoad { /** - * Loads a command into the REPL and - * evaluates them one after another - */ + * Loads a command into the REPL and + * evaluates them one after another + */ def apply(line: String): Unit /** - * Loads and executes the scriptfile on the specified path. - * Compilation units separated by `@\n` are evaluated sequentially. - * If an error happens it prints an error message to the console. - */ + * Loads and executes the scriptfile on the specified path. + * Compilation units separated by `@\n` are evaluated sequentially. + * If an error happens it prints an error message to the console. + */ def exec(path: os.Path): Unit } -trait Session{ +trait Session { /** - * The current stack of frames - */ + * The current stack of frames + */ def frames: List[Frame] /** - * Checkpoints your current work, placing all future work into its own - * frames. If a name is provided, it can be used to quickly recover - * that checkpoint later. - */ + * Checkpoints your current work, placing all future work into its own + * frames. If a name is provided, it can be used to quickly recover + * that checkpoint later. + */ def save(name: String = ""): Unit /** - * Discards the last frames, effectively reverting your session to - * the last `save`-ed checkpoint. If a name is provided, it instead reverts - * your session to the checkpoint with that name. - */ + * Discards the last frames, effectively reverting your session to + * the last `save`-ed checkpoint. If a name is provided, it instead reverts + * your session to the checkpoint with that name. + */ def load(name: String = ""): SessionChanged /** - * Resets you to the last save point. If you pass in `num`, it resets - * you to that many savepoints since the last one. - */ + * Resets you to the last save point. If you pass in `num`, it resets + * you to that many savepoints since the last one. + */ def pop(num: Int = 1): SessionChanged /** - * Deletes a named checkpoint, allowing it to be garbage collected if it - * is no longer accessible. - */ + * Deletes a named checkpoint, allowing it to be garbage collected if it + * is no longer accessible. + */ def delete(name: String): Unit } -trait Clipboard{ +trait Clipboard { /** - * Reads contents from the system clipboard. - * @return System clipboard contents if they are readable as `String`, - * empty string otherwise. - */ + * Reads contents from the system clipboard. + * @return System clipboard contents if they are readable as `String`, + * empty string otherwise. + */ def read: String - + /** - * Sets the contents of the system clipboard. - * - * @param data New contents for the clipboard. - */ + * Sets the contents of the system clipboard. + * + * @param data New contents for the clipboard. 
+ */ def write(data: geny.Writable): Unit } diff --git a/amm/repl/api/src/main/scala/ammonite/repl/tools/Util.scala b/amm/repl/api/src/main/scala/ammonite/repl/tools/Util.scala index e3f1f8b06..e4c26952e 100644 --- a/amm/repl/api/src/main/scala/ammonite/repl/tools/Util.scala +++ b/amm/repl/api/src/main/scala/ammonite/repl/tools/Util.scala @@ -3,8 +3,8 @@ package ammonite.repl.tools object Util { /** - * Additional [[mainargs.TokensReader]] instance to teach it how to read Ammonite paths - */ + * Additional [[mainargs.TokensReader]] instance to teach it how to read Ammonite paths + */ implicit object PathRead extends mainargs.TokensReader.Simple[os.Path] { def shortName = "path" def read(strs: Seq[String]) = Right(os.Path(strs.last, os.pwd)) diff --git a/amm/repl/api/src/main/scala/ammonite/runtime/tools/Tools.scala b/amm/repl/api/src/main/scala/ammonite/runtime/tools/Tools.scala index 215bb4130..5c8e16d2c 100644 --- a/amm/repl/api/src/main/scala/ammonite/runtime/tools/Tools.scala +++ b/amm/repl/api/src/main/scala/ammonite/runtime/tools/Tools.scala @@ -4,22 +4,17 @@ */ package ammonite.runtime.tools - import java.io.{BufferedReader, InputStreamReader} - import ammonite.util.Util.newLine import scala.collection.{GenTraversableOnce, mutable} import scala.util.matching.Regex - - -trait Grepper[T]{ - def apply(t: T, s: Any) - (implicit pp: pprint.PPrinter = Grepper.defaultPPrint): Option[GrepResult] +trait Grepper[T] { + def apply(t: T, s: Any)(implicit pp: pprint.PPrinter = Grepper.defaultPPrint): Option[GrepResult] } -object Grepper{ +object Grepper { val defaultPPrint = pprint.PPrinter.BlackWhite.copy(defaultHeight = Int.MaxValue) implicit object Str extends Grepper[String] { def apply(t: String, s: Any)(implicit pp: pprint.PPrinter = defaultPPrint) = { @@ -40,18 +35,18 @@ object Grepper{ case class GrepResult(spans: Seq[(Int, Int)], text: fansi.Str) -object GrepResult{ +object GrepResult { case class Color(highlight: fansi.Attrs, dotDotDotColor: fansi.Attrs) - object Color{ + object Color { implicit val defaultColor: Color = Color( fansi.Back.Yellow ++ fansi.Color.Blue, fansi.Attrs.Empty ) } - def grepResultRepr(grepResult: GrepResult, - ctx: pprint.Tree.Ctx) - (implicit highlightColor: Color) = { + def grepResultRepr(grepResult: GrepResult, ctx: pprint.Tree.Ctx)(implicit + highlightColor: Color + ) = { val outputSnippets = mutable.Buffer.empty[fansi.Str] val rangeBuffer = mutable.Buffer.empty[(Int, Int)] var remainingSpans = grepResult.spans.toList @@ -62,17 +57,17 @@ object GrepResult{ val width = ctx.width - ctx.leftOffset * 2 - 6 /** - * Consume all the matches that have been aggregated in `rangeBuffer` and - * generate a single result snippet to show the user. Multiple ranges - * can turn up in the same snippet if they are close enough, and we do - * some math to make sure snippets... - * - * - Do not overlap - * - Are at most `width` wide - * - Have `...` if they're truncated on the left or right - * - Are roughly centered on the ranges they contain, as far as possible - * given the above. - */ + * Consume all the matches that have been aggregated in `rangeBuffer` and + * generate a single result snippet to show the user. Multiple ranges + * can turn up in the same snippet if they are close enough, and we do + * some math to make sure snippets... + * + * - Do not overlap + * - Are at most `width` wide + * - Have `...` if they're truncated on the left or right + * - Are roughly centered on the ranges they contain, as far as possible + * given the above. 
+ */ def generateSnippet() = { val start = rangeBuffer.head._1 val end = rangeBuffer.last._2 @@ -103,8 +98,8 @@ object GrepResult{ val wideEnd = cap(0, shiftedEnd, grepResult.text.length) val colorRanges = - for((rangeStart, rangeEnd) <- rangeBuffer) - yield (highlightColor.highlight, rangeStart - wideStart, rangeEnd - wideStart) + for ((rangeStart, rangeEnd) <- rangeBuffer) + yield (highlightColor.highlight, rangeStart - wideStart, rangeEnd - wideStart) val colored = grepResult.text.substring(wideStart, wideEnd).overlayAll(colorRanges.toSeq) @@ -121,7 +116,7 @@ object GrepResult{ // the acceptable width, and when that happens consume all the stored spans // to generate a snippet. Generate one more snippet at the end too to use up // any un-consumed spans - while(remainingSpans.nonEmpty){ + while (remainingSpans.nonEmpty) { val (start, end) = remainingSpans.head remainingSpans = remainingSpans.tail if (rangeBuffer.nonEmpty && end - rangeBuffer(0)._1 >= width) { @@ -135,16 +130,14 @@ object GrepResult{ } } - /** * Lets you filter a list by searching for a matching string or * regex within the pretty-printed contents. */ object grep { - def apply[T: Grepper] - (pat: T, str: Any) - (implicit c: pprint.PPrinter = Grepper.defaultPPrint) - : Option[GrepResult] = { + def apply[T: Grepper](pat: T, str: Any)(implicit + c: pprint.PPrinter = Grepper.defaultPPrint + ): Option[GrepResult] = { implicitly[Grepper[T]].apply(pat, str) } @@ -152,32 +145,32 @@ object grep { * Magic implicits used to turn the [T: PPrint](t: T) => Option[T] * into a real T => Option[T] by materializing PPrint[T] for various values of T */ - object !{ - implicit def FunkyFunc1(f: ![_]) - (implicit c: pprint.PPrinter = Grepper.defaultPPrint) - : Any => GenTraversableOnce[GrepResult] = { + object ! { + implicit def FunkyFunc1(f: ![_])(implicit + c: pprint.PPrinter = Grepper.defaultPPrint + ): Any => GenTraversableOnce[GrepResult] = { (x: Any) => f.apply(x) } - implicit def FunkyFunc2(f: ![_]) - (implicit c: pprint.PPrinter = Grepper.defaultPPrint) - : Any => Boolean = { + implicit def FunkyFunc2(f: ![_])(implicit + c: pprint.PPrinter = Grepper.defaultPPrint + ): Any => Boolean = { x => f.apply(x).isDefined } } - case class ![T: Grepper](pat: T){ + case class ![T: Grepper](pat: T) { def apply(str: Any)(implicit c: pprint.PPrinter = Grepper.defaultPPrint) = { grep.this.apply(pat, str) } } } -case class tail(interval: Int, prefix: Int) extends Function[os.Path, Iterator[String]]{ +case class tail(interval: Int, prefix: Int) extends Function[os.Path, Iterator[String]] { def apply(arg: os.Path): Iterator[String] = { val is = os.read.inputStream(arg) val br = new BufferedReader(new InputStreamReader(is)) - Iterator.continually{ + Iterator.continually { val line = br.readLine() if (line == null) Thread.sleep(interval) Option(line) @@ -192,10 +185,11 @@ case class tail(interval: Int, prefix: Int) extends Function[os.Path, Iterator[S * or use for other things. 
*/ object tail extends tail(100, 50) + /** - * Records how long the given computation takes to run, returning the duration - * in addition to the return value of that computation - */ + * Records how long the given computation takes to run, returning the duration + * in addition to the return value of that computation + */ object time { def apply[T](t: => T) = { @@ -207,22 +201,24 @@ object time { } } - -object browse{ +object browse { case class Strings(values: Seq[String]) - object Strings{ + object Strings { implicit def stringPrefix(s: String): Strings = Strings(Seq(s)) implicit def stringSeqPrefix(s: Seq[String]): Strings = Strings(s) } // R -> show ansi-colors as colors, M -> show current-browse-% bar val lessViewer = Seq("less", "-RM") - def apply(t: Any, - viewer: Strings = lessViewer, - width: Integer = null, - height: Integer = 9999999, - indent: Integer = null) - (implicit pp: pprint.PPrinter = pprint.PPrinter.Color.copy(defaultHeight = Int.MaxValue), - wd: os.Path = os.pwd) = { + def apply( + t: Any, + viewer: Strings = lessViewer, + width: Integer = null, + height: Integer = 9999999, + indent: Integer = null + )(implicit + pp: pprint.PPrinter = pprint.PPrinter.Color.copy(defaultHeight = Int.MaxValue), + wd: os.Path = os.pwd + ) = { os.proc( viewer.values, diff --git a/amm/repl/api/src/main/scala/ammonite/runtime/tools/package.scala b/amm/repl/api/src/main/scala/ammonite/runtime/tools/package.scala index 4a157525a..ef6da2da2 100644 --- a/amm/repl/api/src/main/scala/ammonite/runtime/tools/package.scala +++ b/amm/repl/api/src/main/scala/ammonite/runtime/tools/package.scala @@ -1,7 +1,7 @@ package ammonite /** - * Things that are available inside the Ammonite REPL, are really convenient - * to have available. - */ + * Things that are available inside the Ammonite REPL, are really convenient + * to have available. 
+ */ package object tools diff --git a/amm/repl/src/main/scala/ammonite/main/Defaults.scala b/amm/repl/src/main/scala/ammonite/main/Defaults.scala index 48cc2ea38..fcd500e42 100644 --- a/amm/repl/src/main/scala/ammonite/main/Defaults.scala +++ b/amm/repl/src/main/scala/ammonite/main/Defaults.scala @@ -8,8 +8,8 @@ import coursierapi.Dependency import scala.io.Codec /** - * Constants used in the default configuration for the Ammonite REPL - */ + * Constants used in the default configuration for the Ammonite REPL + */ object Defaults extends DefaultsScalaVersionSpecific { val welcomeBanner = { @@ -23,7 +23,7 @@ object Defaults extends DefaultsScalaVersionSpecific { def ammoniteHome = os.Path(System.getProperty("user.home")) / ".ammonite" def alreadyLoadedDependencies( - resourceName: String = "amm-dependencies.txt" + resourceName: String = "amm-dependencies.txt" ): Seq[Dependency] = { var is: InputStream = null @@ -36,12 +36,14 @@ object Defaults extends DefaultsScalaVersionSpecific { .mkString .split('\n') .filter(_.nonEmpty) - .map(l => l.split(':') match { - case Array(org, name, ver) => - Dependency.of(org, name, ver) - case other => - throw new Exception(s"Cannot parse line '$other' from resource $resourceName") - }) + .map(l => + l.split(':') match { + case Array(org, name, ver) => + Dependency.of(org, name, ver) + case other => + throw new Exception(s"Cannot parse line '$other' from resource $resourceName") + } + ) } finally { if (is != null) is.close() diff --git a/amm/repl/src/main/scala/ammonite/repl/AmmoniteFrontEnd.scala b/amm/repl/src/main/scala/ammonite/repl/AmmoniteFrontEnd.scala index 0c64ee33c..0f9103b0e 100644 --- a/amm/repl/src/main/scala/ammonite/repl/AmmoniteFrontEnd.scala +++ b/amm/repl/src/main/scala/ammonite/repl/AmmoniteFrontEnd.scala @@ -10,26 +10,28 @@ import fastparse.Parsed import ammonite.util.{Colors, Res} import ammonite.compiler.iface.Parser case class AmmoniteFrontEnd( - parser: Parser, - extraFilters: Filter = Filter.empty -) extends FrontEnd{ + parser: Parser, + extraFilters: Filter = Filter.empty +) extends FrontEnd { def width = FrontEndUtils.width def height = FrontEndUtils.height - def action(input: InputStream, - reader: java.io.Reader, - output: OutputStream, - prompt: String, - colors: Colors, - compilerComplete: (Int, String) => (Int, Seq[String], Seq[String]), - history: IndexedSeq[String], - addHistory: String => Unit) = { - readLine(reader, output, prompt, colors, compilerComplete, history) match{ + def action( + input: InputStream, + reader: java.io.Reader, + output: OutputStream, + prompt: String, + colors: Colors, + compilerComplete: (Int, String) => (Int, Seq[String], Seq[String]), + history: IndexedSeq[String], + addHistory: String => Unit + ) = { + readLine(reader, output, prompt, colors, compilerComplete, history) match { case None => Res.Exit(()) case Some(code) => addHistory(code) - parser.split(code, ignoreIncomplete = false).get match{ + parser.split(code, ignoreIncomplete = false).get match { case Right(value) => Res.Success((code, value)) case Left(error) => Res.Failure(error) } @@ -38,16 +40,18 @@ case class AmmoniteFrontEnd( val cutPasteFilter = ReadlineFilters.CutPasteFilter() - def readLine(reader: java.io.Reader, - output: OutputStream, - prompt: String, - colors: Colors, - compilerComplete: (Int, String) => (Int, Seq[String], Seq[String]), - history: IndexedSeq[String]) = { + def readLine( + reader: java.io.Reader, + output: OutputStream, + prompt: String, + colors: Colors, + compilerComplete: (Int, String) => (Int, 
Seq[String], Seq[String]), + history: IndexedSeq[String] + ) = { val writer = new OutputStreamWriter(output) - val autocompleteFilter: Filter = Filter.action(SpecialKeys.Tab){ + val autocompleteFilter: Filter = Filter.action(SpecialKeys.Tab) { case TermState(rest, b, c, _) => val (newCursor, completions, details) = TTY.withSttyOverride(TTY.restoreSigInt()) { compilerComplete(c, b.mkString) @@ -79,18 +83,18 @@ case class AmmoniteFrontEnd( "|>" ) - val completions2 = for(comp <- completions.filterNot(blacklisted.contains)) yield { + val completions2 = for (comp <- completions.filterNot(blacklisted.contains)) yield { val (left, right) = comp.splitAt(common.length) (colors.comment()(left) ++ right).render } val stdout = FrontEndUtils.printCompletions(completions2, details2) - .mkString + .mkString if (details.nonEmpty || completions.isEmpty) Printing(TermState(rest, b, c), stdout) - else{ + else { val newBuffer = b.take(newCursor) ++ common ++ b.drop(c) Printing(TermState(rest, newBuffer, newCursor + common.length), stdout) } @@ -101,12 +105,13 @@ case class AmmoniteFrontEnd( val multilineFilter = Filter.action( SpecialKeys.NewLine, ti => parser.split(ti.ts.buffer.mkString).isEmpty - ){ + ) { case TermState(rest, b, c, _) => BasicFilters.injectNewLine(b, c, rest) } val historyFilter = new HistoryFilter( - () => history.reverse, colors.comment() + () => history.reverse, + colors.comment() ) val selectionFilter = GUILikeFilters.SelectionFilter(indent = 2) @@ -124,15 +129,12 @@ case class AmmoniteFrontEnd( BasicFilters.all ) - val res = Terminal.readLine( prompt, reader, writer, allFilters, displayTransform = { (buffer, cursor) => - - val highlighted = fansi.Str(parser.defaultHighlight( buffer.toVector, colors.comment(), @@ -143,11 +145,17 @@ case class AmmoniteFrontEnd( fansi.Attr.Reset ).mkString) val (newBuffer, offset) = SelectionFilter.mangleBuffer( - selectionFilter, highlighted, cursor, colors.selected() + selectionFilter, + highlighted, + cursor, + colors.selected() ) val newNewBuffer = HistoryFilter.mangleBuffer( - historyFilter, newBuffer, cursor, fansi.Underlined.On + historyFilter, + newBuffer, + cursor, + fansi.Underlined.On ) (newNewBuffer, offset) } diff --git a/amm/repl/src/main/scala/ammonite/repl/ApiImpls.scala b/amm/repl/src/main/scala/ammonite/repl/ApiImpls.scala index ffd0b9db3..edb9918a7 100644 --- a/amm/repl/src/main/scala/ammonite/repl/ApiImpls.scala +++ b/amm/repl/src/main/scala/ammonite/repl/ApiImpls.scala @@ -11,7 +11,7 @@ import java.util.Locale import scala.collection.mutable -class SessionApiImpl(frames0: => StableRef[List[Frame]]) extends Session{ +class SessionApiImpl(frames0: => StableRef[List[Frame]]) extends Session { def frames: List[Frame] = frames0() val namedFrames = mutable.Map.empty[String, List[Frame]] @@ -42,7 +42,7 @@ class SessionApiImpl(frames0: => StableRef[List[Frame]]) extends Session{ def pop(num: Int = 1) = { var next = frames - for(i <- 0 until num){ + for (i <- 0 until num) { if (next.tail != Nil) next = next.tail } val out = SessionChanged.delta(frames.head, next.head) @@ -52,7 +52,7 @@ class SessionApiImpl(frames0: => StableRef[List[Frame]]) extends Session{ frames0() = next out } - + def load(name: String = "") = { val next = if (name == "") frames.tail else namedFrames(name) val out = SessionChanged.delta(frames.head, next.head) @@ -67,12 +67,12 @@ class SessionApiImpl(frames0: => StableRef[List[Frame]]) extends Session{ namedFrames.remove(name) } } -trait ReplApiImpl extends FullReplAPI{ +trait ReplApiImpl extends FullReplAPI { implicit 
def tprintColorsImplicit: pprint.TPrintColors = pprint.TPrintColors( typeColor = colors().`type`() ) - implicit val codeColorsImplicit: CodeColors = new CodeColors{ + implicit val codeColorsImplicit: CodeColors = new CodeColors { def comment = colors().comment() def `type` = colors().`type`() def literal = colors().literal() @@ -95,10 +95,12 @@ trait ReplApiImpl extends FullReplAPI{ def printer: Printer - override def show(t: Any, - width: Integer = null, - height: Integer = 9999999, - indent: Integer = null) = { + override def show( + t: Any, + width: Integer = null, + height: Integer = 9999999, + indent: Integer = null + ) = { pprinter() .tokenize( diff --git a/amm/repl/src/main/scala/ammonite/repl/FrontEndUtils.scala b/amm/repl/src/main/scala/ammonite/repl/FrontEndUtils.scala index 5688cc0c5..4273a95c8 100644 --- a/amm/repl/src/main/scala/ammonite/repl/FrontEndUtils.scala +++ b/amm/repl/src/main/scala/ammonite/repl/FrontEndUtils.scala @@ -2,6 +2,7 @@ package ammonite.repl import scala.annotation.tailrec import ammonite.util.Util.newLine + /** * Created by haoyi on 8/29/15. */ @@ -20,25 +21,23 @@ object FrontEndUtils { val grouped = snippets.toList - .grouped(math.ceil(snippets.length * 1.0 / columns).toInt) - .toList + .grouped(math.ceil(snippets.length * 1.0 / columns).toInt) + .toList - ammonite.util.Util.transpose(grouped).iterator.flatMap{ - case first :+ last => first.map( - x => x ++ " " * (width / columns - x.length) - ) :+ last :+ fansi.Str(newLine) + ammonite.util.Util.transpose(grouped).iterator.flatMap { + case first :+ last => + first.map(x => x ++ " " * (width / columns - x.length)) :+ last :+ fansi.Str(newLine) }.map(_.render) } @tailrec def findPrefix(strings: Seq[String], i: Int = 0): String = { if (strings.count(_.length > i) == 0) strings(0).take(i) - else if(strings.collect{ case x if x.length > i => x(i)}.distinct.length > 1) + else if (strings.collect { case x if x.length > i => x(i) }.distinct.length > 1) strings(0).take(i) else findPrefix(strings, i + 1) } - def printCompletions(completions: Seq[String], - details: Seq[String]): List[String] = { + def printCompletions(completions: Seq[String], details: Seq[String]): List[String] = { val prelude = if (details.length != 0 || completions.length != 0) List(newLine) @@ -48,7 +47,6 @@ object FrontEndUtils { if (details.length == 0) Nil else FrontEndUtils.tabulate(details.map(fansi.Str(_)), FrontEndUtils.width) - val completionText = if (completions.length == 0) Nil else FrontEndUtils.tabulate(completions.map(fansi.Str(_)), FrontEndUtils.width) diff --git a/amm/repl/src/main/scala/ammonite/repl/FrontEnds.scala b/amm/repl/src/main/scala/ammonite/repl/FrontEnds.scala index 3f76f5db1..749cb1b8f 100644 --- a/amm/repl/src/main/scala/ammonite/repl/FrontEnds.scala +++ b/amm/repl/src/main/scala/ammonite/repl/FrontEnds.scala @@ -15,14 +15,13 @@ import ammonite.util.{Catching, Colors, Res} import ammonite.repl.api.FrontEnd import org.jline.reader.impl.DefaultParser - object FrontEnds { class JLineUnix(codeParser: IParser) extends JLineTerm(codeParser) class JLineWindows(codeParser: IParser) extends JLineTerm(codeParser) class JLineTerm(codeParser: IParser) extends FrontEnd { private val term = TerminalBuilder.builder().build() - + private val readerBuilder = LineReaderBuilder.builder().terminal(term) private val ammHighlighter = new AmmHighlighter(codeParser) private val ammCompleter = new AmmCompleter(ammHighlighter) @@ -39,14 +38,16 @@ object FrontEnds { def width = term.getWidth def height = term.getHeight - def action(jInput: 
InputStream, - jReader: java.io.Reader, - jOutput: OutputStream, - prompt: String, - colors: Colors, - compilerComplete: (Int, String) => (Int, Seq[String], Seq[String]), - historyValues: IndexedSeq[String], - addHistory: String => Unit) = { + def action( + jInput: InputStream, + jReader: java.io.Reader, + jOutput: OutputStream, + prompt: String, + colors: Colors, + compilerComplete: (Int, String) => (Int, Seq[String], Seq[String]), + historyValues: IndexedSeq[String], + addHistory: String => Unit + ) = { ammCompleter.compilerComplete = compilerComplete ammParser.addHistory = addHistory @@ -88,14 +89,16 @@ class AmmCompleter(highlighter: Highlighter) extends Completer { // completion varies from action to action var compilerComplete: (Int, String) => (Int, Seq[String], Seq[String]) = (x, y) => (0, Seq.empty, Seq.empty) - + // used when making a candidate - private val leftDelimiters = Set('.') + private val leftDelimiters = Set('.') private val rightDelimiters = Set('.', '(', '{', '[') - override def complete(reader: LineReader, - line: ParsedLine, - candidates: java.util.List[Candidate]): Unit = { + override def complete( + reader: LineReader, + line: ParsedLine, + candidates: java.util.List[Candidate] + ): Unit = { val (completionBase, completions, sigs) = compilerComplete( line.cursor(), line.line() @@ -103,7 +106,7 @@ class AmmCompleter(highlighter: Highlighter) extends Completer { // display method signature(s) if (sigs.nonEmpty) { reader.getTerminal.writer.println() - sigs.foreach{ sig => + sigs.foreach { sig => val sigHighlighted = highlighter.highlight(reader, sig).toAnsi reader.getTerminal.writer.println(sigHighlighted) } @@ -120,7 +123,7 @@ class AmmCompleter(highlighter: Highlighter) extends Completer { /** Makes a full-word candidate based on autocomplete candidate */ private def makeCandidate(word: String, wordCursor: Int, candidate: String): String = { - val leftFromCursor = word.substring(0, wordCursor) + val leftFromCursor = word.substring(0, wordCursor) val rightFromCursor = word.substring(wordCursor) val left = leftFromCursor.reverse.dropWhile(c => !leftDelimiters.contains(c)).reverse val right = rightFromCursor.dropWhile(c => !rightDelimiters.contains(c)) @@ -129,11 +132,14 @@ class AmmCompleter(highlighter: Highlighter) extends Completer { } class AmmParser(codeParser: IParser) extends Parser { - class AmmoniteParsedLine(line: String, words: java.util.List[String], - wordIndex: Int, wordCursor: Int, cursor: Int, - val stmts: Seq[String] = Seq.empty // needed for interpreter - ) - extends defaultParser.ArgumentList(line, words, wordIndex, wordCursor, cursor) + class AmmoniteParsedLine( + line: String, + words: java.util.List[String], + wordIndex: Int, + wordCursor: Int, + cursor: Int, + val stmts: Seq[String] = Seq.empty // needed for interpreter + ) extends defaultParser.ArgumentList(line, words, wordIndex, wordCursor, cursor) var addHistory: String => Unit = x => () diff --git a/amm/repl/src/main/scala/ammonite/repl/PPrints.scala b/amm/repl/src/main/scala/ammonite/repl/PPrints.scala index 9b9e7f5da..5a6afa643 100644 --- a/amm/repl/src/main/scala/ammonite/repl/PPrints.scala +++ b/amm/repl/src/main/scala/ammonite/repl/PPrints.scala @@ -5,7 +5,7 @@ import ammonite.runtime.tools.GrepResult import ammonite.util.Util import pprint.Renderer -object PPrints{ +object PPrints { def replPPrintHandlers(width: => Int): PartialFunction[Any, pprint.Tree] = { // case x: os.Path => PPrints.pathRepr(x) // case x: os.RelPath => PPrints.relPathRepr(x) @@ -14,13 +14,12 @@ object PPrints{ 
case t: scala.xml.Elem => pprint.Tree.Lazy(_ => Iterator(t.toString)) } - def reprSection(s: String, cfg: pprint.Tree.Ctx): fansi.Str = { val validIdentifier = "([a-zA-Z_][a-zA-Z_0-9]+)".r - if (validIdentifier.findFirstIn(s) == Some(s)){ + if (validIdentifier.findFirstIn(s) == Some(s)) { cfg.literalColor('\'' + s) - }else{ + } else { cfg.literalColor(pprint.Util.literalize(s)) } } @@ -36,4 +35,3 @@ object PPrints{ ) } - diff --git a/amm/repl/src/main/scala/ammonite/repl/Repl.scala b/amm/repl/src/main/scala/ammonite/repl/Repl.scala index ad8e898e9..853dec1c5 100644 --- a/amm/repl/src/main/scala/ammonite/repl/Repl.scala +++ b/amm/repl/src/main/scala/ammonite/repl/Repl.scala @@ -13,27 +13,29 @@ import coursierapi.Dependency import scala.annotation.tailrec -class Repl(input: InputStream, - output: OutputStream, - error: OutputStream, - storage: Storage, - baseImports: Imports, - basePredefs: Seq[PredefInfo], - customPredefs: Seq[PredefInfo], - wd: os.Path, - welcomeBanner: Option[String], - replArgs: IndexedSeq[Bind[_]] = Vector.empty, - initialColors: Colors = Colors.Default, - replCodeWrapper: CodeWrapper, - scriptCodeWrapper: CodeWrapper, - alreadyLoadedDependencies: Seq[Dependency], - importHooks: Map[Seq[String], ImportHook], - compilerBuilder: CompilerBuilder, - parser: Parser, - initialClassLoader: ClassLoader = - classOf[ammonite.repl.api.ReplAPI].getClassLoader, - classPathWhitelist: Set[Seq[String]], - warnings: Boolean) { repl => +class Repl( + input: InputStream, + output: OutputStream, + error: OutputStream, + storage: Storage, + baseImports: Imports, + basePredefs: Seq[PredefInfo], + customPredefs: Seq[PredefInfo], + wd: os.Path, + welcomeBanner: Option[String], + replArgs: IndexedSeq[Bind[_]] = Vector.empty, + initialColors: Colors = Colors.Default, + replCodeWrapper: CodeWrapper, + scriptCodeWrapper: CodeWrapper, + alreadyLoadedDependencies: Seq[Dependency], + importHooks: Map[Seq[String], ImportHook], + compilerBuilder: CompilerBuilder, + parser: Parser, + initialClassLoader: ClassLoader = + classOf[ammonite.repl.api.ReplAPI].getClassLoader, + classPathWhitelist: Set[Seq[String]], + warnings: Boolean +) { repl => val prompt = Ref("@ ") @@ -51,7 +53,7 @@ class Repl(input: InputStream, val (colors, printer) = Interpreter.initPrinters(initialColors, output, error, true) - val argString = replArgs.zipWithIndex.map{ case (b, idx) => + val argString = replArgs.zipWithIndex.map { case (b, idx) => s""" val ${b.name} = ammonite.repl.ReplBridge.value.Internal.replArgs($idx).value.asInstanceOf[${b.typeName.value}] @@ -61,12 +63,11 @@ class Repl(input: InputStream, val frames = Ref(List(ammonite.runtime.Frame.createInitial(initialClassLoader))) /** - * The current line number of the REPL, used to make sure every snippet - * evaluated can have a distinct name that doesn't collide. - */ + * The current line number of the REPL, used to make sure every snippet + * evaluated can have a distinct name that doesn't collide. 
+ */ var currentLine = 0 - val sess0 = new SessionApiImpl(frames) def imports = frames().head.imports @@ -118,10 +119,10 @@ class Repl(input: InputStream, def width = frontEnd().width def height = frontEnd().height - object load extends ReplLoad with (String => Unit){ + object load extends ReplLoad with (String => Unit) { def apply(line: String) = { - interp.processExec(line, currentLine, () => currentLine += 1) match{ + interp.processExec(line, currentLine, () => currentLine += 1) match { case Res.Failure(s) => throw new CompilationError(s) case Res.Exception(t, s) => throw t case _ => @@ -162,16 +163,15 @@ class Repl(input: InputStream, // code paths, but then the fixed overhead gets larger so not really worth it val code = s"""val array = Seq.tabulate(10)(_*2).toArray.max""" val stmts = parser.split(code).get.toOption.get - interp.processLine(code, stmts, 9999999, silent = true, () => () /*donothing*/) + interp.processLine(code, stmts, 9999999, silent = true, () => () /*donothing*/ ) } - sess0.save() interp.createFrame() val reader = new InputStreamReader(input) - def action() = for{ + def action() = for { _ <- Catching { case Ex(e: ThreadDeath) => Thread.interrupted() @@ -198,10 +198,11 @@ class Repl(input: InputStream, colors(), interp.compilerManager.complete(_, importsForCompletion.toString, _), storage.fullHistory(), - addHistory = (code) => if (code != "") { - storage.fullHistory() = storage.fullHistory() :+ code - history = history :+ code - } + addHistory = (code) => + if (code != "") { + storage.fullHistory() = storage.fullHistory() :+ code + history = history :+ code + } ) out <- interp.processLine(code, stmts, currentLine, false, () => currentLine += 1) } yield { @@ -209,8 +210,6 @@ class Repl(input: InputStream, out } - - def run(): Any = { welcomeBanner .map(_.replace("%SCALA_VERSION%", compilerBuilder.scalaVersion)) @@ -224,7 +223,7 @@ class Repl(input: InputStream, printer.error, lastException = _, colors() - ) match{ + ) match { case None => printer.outStream.println() loop() @@ -239,9 +238,9 @@ class Repl(input: InputStream, } } -object Repl{ +object Repl { def handleOutput(interp: Interpreter, res: Res[Evaluated]): Unit = { - res match{ + res match { case Res.Skip => // do nothing case Res.Exit(value) => interp.compilerManager.shutdownPressy() case Res.Success(ev) => @@ -251,12 +250,14 @@ object Repl{ case _ => () } } - def handleRes(res: Res[Any], - printInfo: String => Unit, - printError: String => Unit, - setLastException: Throwable => Unit, - colors: Colors): Option[Any] = { - res match{ + def handleRes( + res: Res[Any], + printInfo: String => Unit, + printError: String => Unit, + setLastException: Throwable => Unit, + colors: Colors + ): Option[Any] = { + res match { case Res.Exit(value) => printInfo("Bye!") Some(value) @@ -274,10 +275,12 @@ object Repl{ None } } - def highlightFrame(f: StackTraceElement, - error: fansi.Attrs, - highlightError: fansi.Attrs, - source: fansi.Attrs) = { + def highlightFrame( + f: StackTraceElement, + error: fansi.Attrs, + highlightError: fansi.Attrs, + source: fansi.Attrs + ) = { val src = if (f.isNativeMethod) source("Native Method") else if (f.getFileName == null) source("Unknown Source") @@ -290,8 +293,8 @@ object Repl{ } val prefix :+ clsName = f.getClassName.split('.').toSeq - val prefixString = prefix.map(_+'.').mkString("") - val clsNameString = clsName //.replace("$", error("$")) + val prefixString = prefix.map(_ + '.').mkString("") + val clsNameString = clsName // .replace("$", error("$")) val method = error(prefixString) ++ 
highlightError(clsNameString) ++ error(".") ++ highlightError(f.getMethodName) @@ -303,10 +306,12 @@ object Repl{ x.takeWhile(x => !cutoff(x.getMethodName)) } - def showException(ex: Throwable, - error: fansi.Attrs, - highlightError: fansi.Attrs, - source: fansi.Attrs) = { + def showException( + ex: Throwable, + error: fansi.Attrs, + highlightError: fansi.Attrs, + source: fansi.Attrs + ) = { val traces = Ex.unapplySeq(ex).get.map(exception => error(exception.toString + newLine + diff --git a/amm/repl/src/main/scala/ammonite/repl/Signaller.scala b/amm/repl/src/main/scala/ammonite/repl/Signaller.scala index a22dc144f..edc703563 100644 --- a/amm/repl/src/main/scala/ammonite/repl/Signaller.scala +++ b/amm/repl/src/main/scala/ammonite/repl/Signaller.scala @@ -13,11 +13,11 @@ object Signaller { * `unsafe` stuff because that's the only way you can make * it properly reset when you're finished. */ -case class Signaller(sigStr: String)(f: => Unit) extends Scoped{ +case class Signaller(sigStr: String)(f: => Unit) extends Scoped { import Signaller._ def apply[T](t: => T): T = { - val handler = new SignalHandler () { + val handler = new SignalHandler() { def handle(sig: Signal) = f } @@ -26,8 +26,8 @@ case class Signaller(sigStr: String)(f: => Unit) extends Scoped{ handlers(sig) = sun.misc.Signal.handle(sig, handler) :: handlers.getOrElse(sig, List()) try t - finally{ - val head::tail = handlers(sig) + finally { + val head :: tail = handlers(sig) handlers(sig) = tail val handlerToRegister = tail.headOption.getOrElse(sun.misc.SignalHandler.SIG_DFL) sun.misc.Signal.handle(sig, handlerToRegister) @@ -39,9 +39,9 @@ case class Signaller(sigStr: String)(f: => Unit) extends Scoped{ * Converts something with a scoped `apply` method into * something which can be similarly used in a for-comprehension */ -trait Scoped{ +trait Scoped { def apply[T](t: => T): T def foreach[T](t: Unit => T): T = apply(t(())) def flatMap[T](t: Unit => T): T = apply(t(())) def map[T](t: Unit => T): T = apply(t(())) -} \ No newline at end of file +} diff --git a/amm/repl/src/main/scala/ammonite/repl/package.scala b/amm/repl/src/main/scala/ammonite/repl/package.scala index fbfe140c7..9d1254179 100644 --- a/amm/repl/src/main/scala/ammonite/repl/package.scala +++ b/amm/repl/src/main/scala/ammonite/repl/package.scala @@ -1,7 +1,7 @@ package ammonite /** - * Everything to do with the interaction of the Ammonite REPL with the user - * and the terminal. Interfacing with ammonite-terminal (or with JLine) - */ + * Everything to do with the interaction of the Ammonite REPL with the user + * and the terminal. 
Interfacing with ammonite-terminal (or with JLine) + */ package object repl diff --git a/amm/repl/src/test/scala-2.12/ammonite/unit/SourceTests212.scala b/amm/repl/src/test/scala-2.12/ammonite/unit/SourceTests212.scala index d40050427..dd6d1d47f 100644 --- a/amm/repl/src/test/scala-2.12/ammonite/unit/SourceTests212.scala +++ b/amm/repl/src/test/scala-2.12/ammonite/unit/SourceTests212.scala @@ -1,17 +1,15 @@ package ammonite.unit - import utest._ import ammonite.compiler.tools.source.load import ammonite.util.Util import ammonite.util.Util.Location //import fastparse.utils.{ElemSetHelper, Generator, IndexedParserInput} -object SourceTests212 extends TestSuite{ - val tests = Tests{ +object SourceTests212 extends TestSuite { + val tests = Tests { def check(loaded: Location, expectedFileName: String, expected: String, slop: Int = 10) = { - val loadedFileName = loaded.fileName assert(loadedFileName == expectedFileName) // The line number from first bytecode of earliest concrete method @@ -24,9 +22,8 @@ object SourceTests212 extends TestSuite{ assert(nearby.contains(expected)) } - - test("objectInfo"){ - test("fieldsAreTreatedAsObjects"){ + test("objectInfo") { + test("fieldsAreTreatedAsObjects") { // Can't use Java Std Lib methods because SBT screws up classloaders in test suite check( load(com.github.javaparser.JavaToken.INVALID), @@ -36,8 +33,8 @@ object SourceTests212 extends TestSuite{ } } - test("objectMemberInfo"){ - test("implementedBySuperclass"){ + test("objectMemberInfo") { + test("implementedBySuperclass") { // The file has changed names since earlier versions... val list: List[Int] = List(1, 2, 3) @@ -49,7 +46,7 @@ object SourceTests212 extends TestSuite{ } } - test("staticMethod"){ + test("staticMethod") { // Can't use Java Std Lib methods because SBT screws up classloaders in test suite check( load(com.github.javaparser.JavaParser.parseBlock _), @@ -58,23 +55,21 @@ object SourceTests212 extends TestSuite{ ) } - test("fuzz"){ + test("fuzz") { // Feed a bunch of arbitrary classes and methods from a variety of places // through our location-finder logic to try and find edge cases where // things misbehave or blow up - /** - * We only bother to run these "fuzz"-style tests under Scala 2.12, - * because they're too fragile w.r.t. traits/methods moving around - * between major versions of Scala. Nevertheless, this should give us a - * reasonable amount of confidence that the functionality works across - * a range of inputs, and the earlier unit tests should give us some - * confidence it works across a range of Scala versions - */ - - test("List"){ + * We only bother to run these "fuzz"-style tests under Scala 2.12, + * because they're too fragile w.r.t. traits/methods moving around + * between major versions of Scala. 
Nevertheless, this should give us a + * reasonable amount of confidence that the functionality works across + * a range of inputs, and the earlier unit tests should give us some + * confidence it works across a range of Scala versions + */ + test("List") { test("head") - check(load(List().head), "IterableLike.scala", "def head") test("apply") - check(load(List().apply _), "LinearSeqOptimized.scala", "def apply") @@ -94,12 +89,13 @@ object SourceTests212 extends TestSuite{ test("mkString") - check(load(List().mkString _), "TraversableOnce.scala", "def mkString") test("aggregate") - check( load(List().aggregate _), - "TraversableOnce.scala", "def aggregate" + "TraversableOnce.scala", + "def aggregate" ) // These result in a divering implicit expansion, even in plain Scala - // test("min") - check(load(List().min _), "TraversableOnce.scala", "def min") - // test("max") - check(load(List().max _), "TraversableOnce.scala", "def max") + // test("min") - check(load(List().min _), "TraversableOnce.scala", "def min") + // test("max") - check(load(List().max _), "TraversableOnce.scala", "def max") test("groupBy") - check(load(List().groupBy _), "TraversableLike.scala", "def groupBy") test("compose") - check(load(List().compose _), "Function1.scala", "def compose") @@ -122,7 +118,7 @@ object SourceTests212 extends TestSuite{ "def productIterator" ) } - test("scalaz"){ + test("scalaz") { test("base") - check(load(scalaz.==>>), "Map.scala", "object ==>>") // Some aliases the make our code shorter diff --git a/amm/repl/src/test/scala/ammonite/DualTestRepl.scala b/amm/repl/src/test/scala/ammonite/DualTestRepl.scala index 75ad62ecf..ac9aa0344 100644 --- a/amm/repl/src/test/scala/ammonite/DualTestRepl.scala +++ b/amm/repl/src/test/scala/ammonite/DualTestRepl.scala @@ -4,8 +4,8 @@ import ammonite.compiler.CodeClassWrapper import ammonite.util.{Evaluated, Res} /** - * Wraps several [[TestRepl]], and runs its tests against all of them. - */ + * Wraps several [[TestRepl]], and runs its tests against all of them. + */ class DualTestRepl { dual => def predef: (String, Option[os.Path]) = ("", None) @@ -46,8 +46,7 @@ class DualTestRepl { dual => } def result(input: String, expected: Res[Evaluated]): Unit = repls.foreach(_.result(input, expected)) - def fail(input: String, - failureCheck: String => Boolean = _ => true): Unit = + def fail(input: String, failureCheck: String => Boolean = _ => true): Unit = repls.foreach(_.fail(input, failureCheck)) def notFound(name: String): String = diff --git a/amm/repl/src/test/scala/ammonite/SerializationUtil.scala b/amm/repl/src/test/scala/ammonite/SerializationUtil.scala index 24e62cf7c..6d8baaed8 100644 --- a/amm/repl/src/test/scala/ammonite/SerializationUtil.scala +++ b/amm/repl/src/test/scala/ammonite/SerializationUtil.scala @@ -10,8 +10,7 @@ object SerializationUtil { try { oos.writeObject(m) baos.toByteArray - } - finally oos.close() + } finally oos.close() } def deserialize(b: Array[Byte], loader: ClassLoader): AnyRef = { @@ -24,18 +23,19 @@ object SerializationUtil { // from akka.util /** - * ClassLoaderObjectInputStream tries to utilize the provided ClassLoader - * to load Classes and falls back to ObjectInputStreams resolver. - * - * @param classLoader - the ClassLoader which is to be used primarily - * @param is - the InputStream that is wrapped - */ + * ClassLoaderObjectInputStream tries to utilize the provided ClassLoader + * to load Classes and falls back to ObjectInputStreams resolver. 
+ * + * @param classLoader - the ClassLoader which is to be used primarily + * @param is - the InputStream that is wrapped + */ class ClassLoaderObjectInputStream( - classLoader: ClassLoader, - is: InputStream + classLoader: ClassLoader, + is: InputStream ) extends ObjectInputStream(is) { override protected def resolveClass(objectStreamClass: ObjectStreamClass): Class[_] = - try Class.forName(objectStreamClass.getName, false, classLoader) catch { + try Class.forName(objectStreamClass.getName, false, classLoader) + catch { case cnfe: ClassNotFoundException ⇒ super.resolveClass(objectStreamClass) } } diff --git a/amm/repl/src/test/scala/ammonite/TestRepl.scala b/amm/repl/src/test/scala/ammonite/TestRepl.scala index 63ee33adf..628e0041c 100644 --- a/amm/repl/src/test/scala/ammonite/TestRepl.scala +++ b/amm/repl/src/test/scala/ammonite/TestRepl.scala @@ -36,7 +36,6 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => java.nio.file.Files.createTempDirectory("ammonite-tester") ) - import java.io.ByteArrayOutputStream import java.io.PrintStream @@ -83,25 +82,28 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => wrapperNamePrefix = wrapperNamePrefix.getOrElse(Interpreter.Parameters().wrapperNamePrefix), warnings = warnings ) - val interp = try { - new Interpreter( - compilerBuilder, - () => parser, - getFrame = () => frames().head, - createFrame = () => { val f = sess0.childFrame(frames().head); frames() = f :: frames(); f }, - replCodeWrapper = codeWrapper, - scriptCodeWrapper = codeWrapper, - parameters = interpParams - ) - - }catch{ case e: Throwable => - println(infoBuffer.mkString) - println(outString) - println(resString) - println(warningBuffer.mkString) - println(errorBuffer.mkString) - throw e - } + val interp = + try { + new Interpreter( + compilerBuilder, + () => parser, + getFrame = () => frames().head, + createFrame = + () => { val f = sess0.childFrame(frames().head); frames() = f :: frames(); f }, + replCodeWrapper = codeWrapper, + scriptCodeWrapper = codeWrapper, + parameters = interpParams + ) + + } catch { + case e: Throwable => + println(infoBuffer.mkString) + println(outString) + println(resString) + println(warningBuffer.mkString) + println(errorBuffer.mkString) + throw e + } val extraBridges = Seq( ( @@ -132,10 +134,10 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => def width = 80 def height = 80 - object load extends ReplLoad with (String => Unit){ + object load extends ReplLoad with (String => Unit) { def apply(line: String) = { - interp.processExec(line, currentLine, () => currentLine += 1) match{ + interp.processExec(line, currentLine, () => currentLine += 1) match { case Res.Failure(s) => throw new CompilationError(s) case Res.Exception(t, s) => throw t case _ => @@ -155,12 +157,12 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => def replArgs: IndexedSeq[Bind[_]] = replArgs0 override def print[T: TPrint]( - value: => T, - ident: String, - custom: Option[String] + value: => T, + ident: String, + custom: Option[String] )(implicit - tcolors: TPrintColors, - classTagT: ClassTag[T] + tcolors: TPrintColors, + classTagT: ClassTag[T] ): Iterator[String] = if (classTagT == scala.reflect.classTag[ammonite.Nope]) Iterator() @@ -182,7 +184,10 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => for { (error, _) <- interp.initializePredef( - basePredefs, customPredefs, extraBridges, baseImports + basePredefs, + customPredefs, 
+ extraBridges, + baseImports ) } { val (msgOpt, causeOpt) = error match { @@ -202,8 +207,6 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => ) } - - def session(sess: String): Unit = { // Remove the margin from the block and break // it into blank-line-delimited steps @@ -215,10 +218,11 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => // Strip margin & whitespace val steps = sess.replace( - Util.newLine + margin, Util.newLine + Util.newLine + margin, + Util.newLine ).replaceAll(" *\n", "\n").split("\n\n") - for((step, index) <- steps.zipWithIndex){ + for ((step, index) <- steps.zipWithIndex) { // Break the step into the command lines, starting with @, // and the result lines val (cmdLines, resultLines) = @@ -232,7 +236,7 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => // // ...except for the empty 0-line fragment, and the entire fragment, // both of which are complete. - for (incomplete <- commandText.inits.toSeq.drop(1).dropRight(1)){ + for (incomplete <- commandText.inits.toSeq.drop(1).dropRight(1)) { assert(ammonite.compiler.Parsers.split(incomplete.mkString(Util.newLine)).forall(_.isLeft)) } @@ -267,22 +271,22 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => assert(contains(error0.linesIterator.toList, strippedExpected.linesIterator.toList)) - }else if (expected.startsWith("warning: ")){ + } else if (expected.startsWith("warning: ")) { val strippedExpected = expected.stripPrefix("warning: ") assert(warning.contains(strippedExpected)) - }else if (expected == "warning:") + } else if (expected == "warning:") assert(warning.isEmpty) - else if (expected.startsWith("info: ")){ + else if (expected.startsWith("info: ")) { val strippedExpected = expected.stripPrefix("info: ") assert(info.contains(strippedExpected)) - }else if (expected == "") { - processed match{ + } else if (expected == "") { + processed match { case Res.Success(_) => // do nothing case Res.Skip => // do nothing case _: Res.Failing => - assert{ + assert { identity(error) identity(warning) identity(out) @@ -292,7 +296,7 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => } } - }else { + } else { processed match { case Res.Success(str) => // Strip trailing whitespace @@ -308,7 +312,7 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => if (expected0.endsWith(" = ?")) { val expectedStart = expected0.stripSuffix("?") failLoudly( - assert{ + assert { identity(error) identity(warning) identity(info) @@ -317,7 +321,7 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => ) } else failLoudly( - assert{ + assert { identity(error) identity(warning) identity(info) @@ -326,7 +330,7 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => ) case Res.Failure(failureMsg) => - assert{ + assert { identity(error) identity(warning) identity(out) @@ -337,19 +341,20 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => } case Res.Exception(ex, failureMsg) => val trace = Repl.showException( - ex, fansi.Attrs.Empty, fansi.Attrs.Empty, fansi.Attrs.Empty - ) + Util.newLine + failureMsg - assert({identity(trace); identity(expected); false}) + ex, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty + ) + Util.newLine + failureMsg + assert({ identity(trace); identity(expected); false }) case _ => throw new Exception( - s"Printed $allOut does not match what was 
expected: $expected" - ) + s"Printed $allOut does not match what was expected: $expected" + ) } } } } - - def run(input: String, index: Int) = { outBytes.reset() @@ -369,7 +374,7 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => false, () => currentLine += 1 ) - processed match{ + processed match { case Res.Failure(s) => printer0.error(s) case Res.Exception(throwable, msg) => printer0.error( @@ -389,34 +394,35 @@ class TestRepl(compilerBuilder: ICompilerBuilder = CompilerBuilder()) { self => ) } - - def fail(input: String, - failureCheck: String => Boolean = _ => true) = { + def fail(input: String, failureCheck: String => Boolean = _ => true) = { val (processed, out, _, warning, error, info) = run(input, 0) - processed match{ - case Res.Success(v) => assert({identity(v); identity(allOutput); false}) + processed match { + case Res.Success(v) => assert({ identity(v); identity(allOutput); false }) case Res.Failure(s) => failLoudly(assert(failureCheck(s))) case Res.Exception(ex, s) => val msg = Repl.showException( - ex, fansi.Attrs.Empty, fansi.Attrs.Empty, fansi.Attrs.Empty + ex, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty ) + Util.newLine + s failLoudly(assert(failureCheck(msg))) case _ => ??? } } - def result(input: String, expected: Res[Evaluated]) = { val (processed, allOut, _, warning, error, info) = run(input, 0) assert(processed == expected) } def failLoudly[T](t: => T) = try t - catch{ case e: utest.AssertionError => - println("FAILURE TRACE" + Util.newLine + allOutput) - throw e + catch { + case e: utest.AssertionError => + println("FAILURE TRACE" + Util.newLine + allOutput) + throw e } def notFound(name: String): String = diff --git a/amm/repl/src/test/scala/ammonite/TestUtils.scala b/amm/repl/src/test/scala/ammonite/TestUtils.scala index 89e8cce15..d0a828a4e 100644 --- a/amm/repl/src/test/scala/ammonite/TestUtils.scala +++ b/amm/repl/src/test/scala/ammonite/TestUtils.scala @@ -17,17 +17,21 @@ object TestUtils { def scala2 = ammonite.compiler.CompilerBuilder.scalaVersion.startsWith("2.") def createTestInterp( - storage: Storage, - predefImports: Imports = Imports(), - predef: String = "" + storage: Storage, + predefImports: Imports = Imports(), + predef: String = "" ) = { val initialClassLoader = Thread.currentThread().getContextClassLoader val startFrame = Frame.createInitial(initialClassLoader) val printStream = new PrintStream(System.out) val interpParams = Interpreter.Parameters( printer = Printer( - printStream, new PrintStream(System.err), printStream, - println, println, println + printStream, + new PrintStream(System.err), + printStream, + println, + println, + println ), storage = storage, wd = os.pwd, @@ -40,7 +44,6 @@ object TestUtils { val interp = new Interpreter( ammonite.compiler.CompilerBuilder(), () => ammonite.compiler.Parsers, - getFrame = () => startFrame, createFrame = () => throw new Exception("unsupported"), replCodeWrapper = DefaultCodeWrapper, diff --git a/amm/repl/src/test/scala/ammonite/interp/AutocompleteTests.scala b/amm/repl/src/test/scala/ammonite/interp/AutocompleteTests.scala index f833504f7..349f890a1 100644 --- a/amm/repl/src/test/scala/ammonite/interp/AutocompleteTests.scala +++ b/amm/repl/src/test/scala/ammonite/interp/AutocompleteTests.scala @@ -5,12 +5,14 @@ import ammonite.TestUtils._ import utest._ import ammonite.util.Util -object AutocompleteTests extends TestSuite{ - class Completer{ - val check = new DualTestRepl() - def apply(caretCode: String, - cmp: (Set[String]) => Set[String], - 
sigs: (Set[String]) => Set[String] = _ => Set()) = { +object AutocompleteTests extends TestSuite { + class Completer { + val check = new DualTestRepl() + def apply( + caretCode: String, + cmp: (Set[String]) => Set[String], + sigs: (Set[String]) => Set[String] = _ => Set() + ) = { val from = caretCode.indexOf("") val caretCode0 = if (from < 0) caretCode @@ -42,18 +44,22 @@ object AutocompleteTests extends TestSuite{ def ^(s2: Set[T]): Set[T] = (s1 diff s2) | (s2 diff s1) } - val tests = Tests{ + val tests = Tests { println("AutocompleteTests") - test("selection"){ - + test("selection") { // Not sure why clone and finalize don't appear in this list val anyCompletion = Set( - "!=", "==", - "toString", "equals", "hashCode", - "getClass", "asInstanceOf", "isInstanceOf" + "!=", + "==", + "toString", + "equals", + "hashCode", + "getClass", + "asInstanceOf", + "isInstanceOf" ) - test("import") - checking{ complete => + test("import") - checking { complete => // these fail on Java 9, need investigation if (!Util.java9OrAbove && complete.check.scala2) { complete("""import """, Set("java", "javax", "scala") -- _) @@ -79,26 +85,30 @@ object AutocompleteTests extends TestSuite{ Set("LinkedHashMap", "LinkedHashSet", "LinkedList") ^ _ ) complete( - """import scala.uti.""", Set.empty[String] -- _ + """import scala.uti.""", + Set.empty[String] -- _ ) complete( - """import scala.colltion.""", Set.empty[String] -- _ + """import scala.colltion.""", + Set.empty[String] -- _ ) complete("""object X { import y ; def y(z: Int)""", Set.empty[String] -- _) complete( - """import scala.collection.immutable.List.{em, fi}""", Set("empty") -- _ + """import scala.collection.immutable.List.{em, fi}""", + Set("empty") -- _ ) complete( - """import scala.collection.immutable.List.{em, fi}""", Set("fill") -- _ + """import scala.collection.immutable.List.{em, fi}""", + Set("fill") -- _ ) } - test("scope") - checking{ complete => + test("scope") - checking { complete => // these fail on Java 9, need investigation if (!Util.java9OrAbove && complete.check.scala2) { - complete( """""", Set("scala") -- _) - complete( """Seq(1, 2, 3).map(argNameLol => )""", Set("argNameLol") -- _) - complete( """object Zomg{ }""", Set("Zomg") -- _) + complete("""""", Set("scala") -- _) + complete("""Seq(1, 2, 3).map(argNameLol => )""", Set("argNameLol") -- _) + complete("""object Zomg{ }""", Set("Zomg") -- _) complete( "printl", Set("println") ^ _, @@ -112,75 +122,62 @@ object AutocompleteTests extends TestSuite{ // Set("def println(x: Any): Unit", "def println(): Unit") ^ // ) } - test("scopePrefix") - checking{ complete => + test("scopePrefix") - checking { complete => // these fail on Java 9, need investigation if (!Util.java9OrAbove && complete.check.scala2) { - complete( """ammon""", Set("ammonite") ^ _) + complete("""ammon""", Set("ammonite") ^ _) complete( """Seq(1, 2, 3).map(argNameLol => argNam)""", Set("argNameLol") ^ _ ) - complete( """object Zomg{ Zom }""", Set("Zomg") ^ _) - complete( """object Zomg{ Zom }""", Set("Zomg") ^ _) - complete( """object Zomg{ Zom }""", Set("Zomg") ^ _) - complete( """object Zomg{ Zom }""", Set("Zomg") ^ _) + complete("""object Zomg{ Zom }""", Set("Zomg") ^ _) + complete("""object Zomg{ Zom }""", Set("Zomg") ^ _) + complete("""object Zomg{ Zom }""", Set("Zomg") ^ _) + complete("""object Zomg{ Zom }""", Set("Zomg") ^ _) } } - test("dot") - checking{ complete => - - complete( """java.math.""", + test("dot") - checking { complete => + complete( + """java.math.""", Set("MathContext", "BigDecimal", "BigInteger", 
"RoundingMode") ^ _ ) val extra = if (scala2_12) Set() else Set("unless", "when") - complete( """scala.Option.""", - (anyCompletion ++ Set("apply", "empty") ++ extra) ^ _ - ) + complete("""scala.Option.""", (anyCompletion ++ Set("apply", "empty") ++ extra) ^ _) - complete( """Seq(1, 2, 3).map(_.)""", - (anyCompletion ++ Set("+", "-", "*", "/")) -- _ - ) + complete("""Seq(1, 2, 3).map(_.)""", (anyCompletion ++ Set("+", "-", "*", "/")) -- _) - complete( """val x = 1; x + (x.)""", - Set("-", "+", "*", "/") -- _ - ) + complete("""val x = 1; x + (x.)""", Set("-", "+", "*", "/") -- _) } def deepTests(complete: Completer) = { - complete( """fromN""", - Set("scala.concurrent.duration.fromNow") ^ _ - ) - complete( """Fut""", + complete("""fromN""", Set("scala.concurrent.duration.fromNow") ^ _) + complete( + """Fut""", Set("scala.concurrent.Future", "java.util.concurrent.Future") -- _ ) - complete( """SECO""", - Set("scala.concurrent.duration.SECONDS") ^ _ - ) + complete("""SECO""", Set("scala.concurrent.duration.SECONDS") ^ _) } - test("dotPrefix") - checking{ complete => - complete( """java.math.Big""", - Set("BigDecimal", "BigInteger") ^ _ - ) - complete( """scala.Option.option2""", - Set() ^ _ - ) + test("dotPrefix") - checking { complete => + complete("""java.math.Big""", Set("BigDecimal", "BigInteger") ^ _) + complete("""scala.Option.option2""", Set() ^ _) val expected = if (complete.check.scala2) Set( - "def >(x: Double): Boolean", - "def >(x: Float): Boolean", - "def >(x: Int): Boolean", - "def >(x: Short): Boolean", - "def >(x: Long): Boolean", - "def >(x: Char): Boolean", - "def >(x: Byte): Boolean" - ) + "def >(x: Double): Boolean", + "def >(x: Float): Boolean", + "def >(x: Int): Boolean", + "def >(x: Short): Boolean", + "def >(x: Long): Boolean", + "def >(x: Char): Boolean", + "def >(x: Byte): Boolean" + ) else Set( "def >>>(x: Long): Int", @@ -202,11 +199,7 @@ object AutocompleteTests extends TestSuite{ "def >(x: Double): Boolean", "def >(x: Float): Boolean" ) - complete( """val x = 1; x + x.>""", - Set(">>", ">>>") -- _, - expected ^ _ - ) - + complete("""val x = 1; x + x.>""", Set(">>", ">>>") -- _, expected ^ _) // https://issues.scala-lang.org/browse/SI-9153 // @@ -222,20 +215,20 @@ object AutocompleteTests extends TestSuite{ // complete("""Seq(1, 2, 3).map(_.compa)""", compares, ^) } - test("deep") - checking{ complete => + test("deep") - checking { complete => if (complete.check.scala2) deepTests(complete) else "Disabled in Scala 3" } - test("defTab") - checking{ complete => - //Assert no NullPointerException was thrown. Does not verify any completions. - complete( """def""", Set.empty -- _) + test("defTab") - checking { complete => + // Assert no NullPointerException was thrown. Does not verify any completions. 
+ complete("""def""", Set.empty -- _) } - test("Array") - checking{ complete => - //Test around https://github.com/lihaoyi/Ammonite/issues/252 + test("Array") - checking { complete => + // Test around https://github.com/lihaoyi/Ammonite/issues/252 complete("""new Array""", Set() ^ _) } @@ -249,7 +242,7 @@ object AutocompleteTests extends TestSuite{ } } - test("backquotes"){ + test("backquotes") { test("spaces") - checking { complete => complete( """object x { val `Backquoted Bar` = 1 }; x.""", @@ -265,7 +258,7 @@ object AutocompleteTests extends TestSuite{ } } - test("dependencies"){ + test("dependencies") { def dependenciesTests(complete: Completer) = { complete( """import $ivy.`io.get-c`""", @@ -315,4 +308,3 @@ object AutocompleteTests extends TestSuite{ } } } - diff --git a/amm/repl/src/test/scala/ammonite/interp/PrintTests.scala b/amm/repl/src/test/scala/ammonite/interp/PrintTests.scala index 1edb10ff1..d858db2cd 100644 --- a/amm/repl/src/test/scala/ammonite/interp/PrintTests.scala +++ b/amm/repl/src/test/scala/ammonite/interp/PrintTests.scala @@ -4,12 +4,12 @@ import ammonite.DualTestRepl import ammonite.util.Util.newLine import utest._ -object PrintTests extends TestSuite{ - val tests = Tests{ +object PrintTests extends TestSuite { + val tests = Tests { println("PrintTests") val check = new DualTestRepl() - test("simple"){ + test("simple") { for (repl <- check.repls) { val t @ (ev, out, res, warn, err, inf) = repl.run("val n = 2", 0) val expectedRes = "n: Int = 2" @@ -20,7 +20,7 @@ object PrintTests extends TestSuite{ } } - test("out"){ + test("out") { for (repl <- check.repls) { val t @ (ev, out, res, warn, err, inf) = repl.run("show(List(1, 2, 3))", 0) val expectedOut = "List(1, 2, 3)" + newLine @@ -31,7 +31,7 @@ object PrintTests extends TestSuite{ } } - test("both"){ + test("both") { for (repl <- check.repls) { val t @ (ev, out, res, warn, err, inf) = repl.run("show(List(1, 2, 3)); val n = 3", 0) val expectedOut = "List(1, 2, 3)" + newLine diff --git a/amm/repl/src/test/scala/ammonite/session/AdvancedTests.scala b/amm/repl/src/test/scala/ammonite/session/AdvancedTests.scala index 614e4b2de..6cc7b8b01 100644 --- a/amm/repl/src/test/scala/ammonite/session/AdvancedTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/AdvancedTests.scala @@ -5,12 +5,11 @@ import ammonite.{DualTestRepl, TestRepl} import ammonite.util.Res import utest._ - -object AdvancedTests extends TestSuite{ - val tests = Tests{ +object AdvancedTests extends TestSuite { + val tests = Tests { println("AdvancedTests") val check = new DualTestRepl() - test("pprint"){ + test("pprint") { check.session(s""" @ Seq.fill(10)(Seq.fill(3)("Foo")) res0: Seq[Seq[String]] = List( @@ -46,15 +45,15 @@ object AdvancedTests extends TestSuite{ """) } - test("exit"){ + test("exit") { check.result("exit", Res.Exit(())) } - test("skip"){ + test("skip") { check.result("", Res.Skip) } - test("predef"){ - val check2 = new DualTestRepl{ + test("predef") { + val check2 = new DualTestRepl { override def predef = ( """ import math.abs @@ -79,8 +78,8 @@ object AdvancedTests extends TestSuite{ """) } - test("predefSettings"){ - val check2 = new DualTestRepl{ + test("predefSettings") { + val check2 = new DualTestRepl { override def predef = ( if (scala2) """ @@ -102,7 +101,7 @@ object AdvancedTests extends TestSuite{ """) } - test("macros"){ + test("macros") { if (check.scala2) check.session(""" @ import language.experimental.macros @@ -152,7 +151,7 @@ object AdvancedTests extends TestSuite{ @ 
assert(ammStackTrace.forall(_.contains("cmd3.sc"))) """) } - test("typeScope"){ + test("typeScope") { // TPrint issue in Scala 3? val cmBufferType = if (check.scala2) "collection.mutable.Buffer" else "Buffer" @@ -176,13 +175,13 @@ object AdvancedTests extends TestSuite{ res5: Buffer[Int] = ArrayBuffer(1) """) } - test("trappedType"){ + test("trappedType") { check.session(""" @ val nope = ammonite.Nope(2); val n = 2 n: Int = 2 """) } - test("unwrapping"){ + test("unwrapping") { check.session(""" @ { @ val x = 1 @@ -194,7 +193,7 @@ object AdvancedTests extends TestSuite{ res0_2: Int = 3 """) } - test("forceWrapping"){ + test("forceWrapping") { check.session(""" @ {{ @ val x = 1 @@ -204,9 +203,9 @@ object AdvancedTests extends TestSuite{ res0: Int = 3 """) } - test("truncation"){ + test("truncation") { // Need a way to capture stdout in tests to make these tests work - if(false) check.session(""" + if (false) check.session(""" @ Seq.fill(20)(100) res0: Seq[Int] = List( 100, @@ -266,7 +265,7 @@ object AdvancedTests extends TestSuite{ ... """) } - test("private"){ + test("private") { test("vals") - check.session(s""" @ private val x = 1; val y = x + 1 y: Int = 2 @@ -289,7 +288,7 @@ object AdvancedTests extends TestSuite{ """) - test("dontPrint"){ + test("dontPrint") { check.session( """ @ private object Foo { def get = "a" }; val s = Foo.get @@ -312,10 +311,11 @@ object AdvancedTests extends TestSuite{ @ private type T = String; private def foo(): T = "a"; val s: String = foo() s: String = "a" - """) + """ + ) } } - test("compilerPlugin") - retry(3){ + test("compilerPlugin") - retry(3) { if (check.scala2) check.session(""" @ // Compiler plugins imported without `.$plugin` are not loaded @@ -348,7 +348,7 @@ object AdvancedTests extends TestSuite{ else "Disabled" } - test("replApiUniqueness"){ + test("replApiUniqueness") { // Make sure we can instantiate multiple copies of Interpreter, with each // one getting its own `ReplBridge`. 
This ensures that the various // Interpreters are properly encapsulated and don't interfere with each @@ -376,8 +376,7 @@ object AdvancedTests extends TestSuite{ if (scalaVer == "2.12.0" || scalaVer == "2.12.1") "2.1.0" else "2.1.1" s"""import $$plugin.$$ivy.`org.scalamacros:::paradise:$paradiseVersion`""" - } - else + } else "interp.configureCompiler(_.settings.YmacroAnnotations.value = true)" if (check.scala2) @@ -401,7 +400,7 @@ object AdvancedTests extends TestSuite{ else "Disabled in Scala 3" } - test("desugar"){ + test("desugar") { if (check.scala2) check.session(""" @ desugar{1 + 2 max 3} @@ -410,14 +409,14 @@ object AdvancedTests extends TestSuite{ else "Disabled in Scala 3" } - test("loadingModulesInPredef"){ + test("loadingModulesInPredef") { - val dir = os.pwd/"amm"/"src"/"test"/"resources"/"scripts"/"predefWithLoad" - test("loadExec"){ + val dir = os.pwd / "amm" / "src" / "test" / "resources" / "scripts" / "predefWithLoad" + test("loadExec") { val c1 = new DualTestRepl() { override def predef = ( - os.read(dir/"PredefLoadExec.sc"), - Some(dir/"PredefLoadExec.sc") + os.read(dir / "PredefLoadExec.sc"), + Some(dir / "PredefLoadExec.sc") ) } c1.session(""" @@ -425,11 +424,11 @@ object AdvancedTests extends TestSuite{ previouslyLoaded: Int = 1337 """) } - test("loadModule"){ - val c2 = new DualTestRepl(){ + test("loadModule") { + val c2 = new DualTestRepl() { override def predef = ( - os.read(dir/"PredefLoadModule.sc"), - Some(dir/"PredefLoadModule.sc") + os.read(dir / "PredefLoadModule.sc"), + Some(dir / "PredefLoadModule.sc") ) } c2.session(""" @@ -437,11 +436,11 @@ object AdvancedTests extends TestSuite{ previouslyLoaded: Int = 1337 """) } - test("importIvy"){ - val c2 = new DualTestRepl(){ + test("importIvy") { + val c2 = new DualTestRepl() { override def predef = ( - os.read(dir/"PredefMagicImport.sc"), - Some(dir/"PredefMagicImport.sc") + os.read(dir / "PredefMagicImport.sc"), + Some(dir / "PredefMagicImport.sc") ) } c2.session(""" @@ -453,7 +452,7 @@ object AdvancedTests extends TestSuite{ """) } } - test("bytecodeForReplClasses"){ + test("bytecodeForReplClasses") { check.session(""" @ case class Child(name: String) @@ -468,15 +467,15 @@ object AdvancedTests extends TestSuite{ res4: Boolean = true """) } - test("customBridge"){ + test("customBridge") { check.session(""" @ val s = test.message s: String = "ba" """) } - test("dontRefreshCompiler"){ - test{ + test("dontRefreshCompiler") { + test { check.session(""" @ val c1 = repl.compiler @@ -496,7 +495,7 @@ object AdvancedTests extends TestSuite{ """) } - test("preconfigured"){ + test("preconfigured") { check.session(""" @ val c0 = repl.compiler @@ -523,7 +522,7 @@ object AdvancedTests extends TestSuite{ } } - test("loadURL"){ + test("loadURL") { if (check.scala2) { val sbv = { val sv = check.scalaVersion @@ -546,7 +545,7 @@ object AdvancedTests extends TestSuite{ } else "Disabled in Scala 3" } - test("accessPressy"){ + test("accessPressy") { if (check.scala2) check.session(""" @ def typeAt(code: String, pos: Int) = { @ import scala.tools.nsc.interactive.Response @@ -572,7 +571,7 @@ object AdvancedTests extends TestSuite{ """) else "N/A in Scala 3" } - test("accessInMemoryClassMap"){ + test("accessInMemoryClassMap") { check.session(""" @ class Foo defined class Foo @@ -595,7 +594,7 @@ object AdvancedTests extends TestSuite{ """) } - test("given"){ + test("given") { if (check.scala2) "N/A" else check.session(""" @@ -619,7 +618,7 @@ object AdvancedTests extends TestSuite{ fooOrd: Ordering[Foo] = """) } - 
test("extension-methods"){ + test("extension-methods") { if (scala2) "N/A" else check.session(""" @@ -830,8 +829,7 @@ object AdvancedTests extends TestSuite{ ^ """ ) - } - else { + } else { objCheck.session( """ @ @deprecated("foo", "1.2") def value(): Int = 2 diff --git a/amm/repl/src/test/scala/ammonite/session/BuiltinTests.scala b/amm/repl/src/test/scala/ammonite/session/BuiltinTests.scala index e126df886..ef276cf48 100644 --- a/amm/repl/src/test/scala/ammonite/session/BuiltinTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/BuiltinTests.scala @@ -7,12 +7,12 @@ import java.io.File import scala.collection.JavaConverters._ import scala.collection.{immutable => imm} -object BuiltinTests extends TestSuite{ +object BuiltinTests extends TestSuite { - val tests = Tests{ + val tests = Tests { println("BuiltinTests") val check = new DualTestRepl() - test("basicConfig"){ + test("basicConfig") { check.session(""" @ // Set the shell prompt to be something else @@ -56,7 +56,7 @@ object BuiltinTests extends TestSuite{ """) } - test("imports"){ + test("imports") { check.session(""" @ assert(repl.imports.toString == ""); assert(repl.fullImports.toString != "") @@ -72,7 +72,7 @@ object BuiltinTests extends TestSuite{ """) } - test("loadCP"){ + test("loadCP") { check.session(""" @ val javaSrc = os.pwd/"amm"/"src"/"test"/"resources"/"loadable"/"hello"/"Hello.java" @@ -91,7 +91,11 @@ object BuiltinTests extends TestSuite{ test("importCp") { test { val catsCp = coursierapi.Fetch.create() - .addDependencies(coursierapi.Dependency.of("org.typelevel", "cats-core_" + check.scalaBinaryVersion, "2.9.0")) + .addDependencies(coursierapi.Dependency.of( + "org.typelevel", + "cats-core_" + check.scalaBinaryVersion, + "2.9.0" + )) .fetch() .asScala .map(os.Path(_, os.pwd)) @@ -110,7 +114,11 @@ object BuiltinTests extends TestSuite{ test { val catsCp = coursierapi.Fetch.create() - .addDependencies(coursierapi.Dependency.of("org.typelevel", "cats-core_" + check.scalaBinaryVersion, "2.9.0")) + .addDependencies(coursierapi.Dependency.of( + "org.typelevel", + "cats-core_" + check.scalaBinaryVersion, + "2.9.0" + )) .fetch() .asScala val cpStr = catsCp.map(_.toString).mkString(File.pathSeparator) @@ -124,7 +132,7 @@ object BuiltinTests extends TestSuite{ """) } } - test("settings"){ + test("settings") { val fruitlessTypeTestWarningMessageBlahBlahBlah = "fruitless type test: a value of type List[Int] cannot also be a List[Double]" @@ -183,7 +191,7 @@ object BuiltinTests extends TestSuite{ res10: Boolean = false """) } - test("infoLogging"){ + test("infoLogging") { if (check.scala2) check.session(""" @ 1 + 1 @@ -199,8 +207,7 @@ object BuiltinTests extends TestSuite{ "Disabled in Scala 3" } - - test("saveLoad"){ + test("saveLoad") { check.session( s""" @ val veryImportant = 1 @@ -238,9 +245,10 @@ object BuiltinTests extends TestSuite{ @ import scalatags.Text.all._ error: ${check.notFound("scalatags")} - """) + """ + ) } - test("saveLoad2"){ + test("saveLoad2") { check.session(""" @ val (x, y) = (1, 2) x: Int = 1 @@ -274,7 +282,7 @@ object BuiltinTests extends TestSuite{ res11: Int = -1 """) } - test("discardLoadCommandResult"){ + test("discardLoadCommandResult") { test - check.session(s""" @ repl.sess.save("foo") @@ -296,7 +304,7 @@ object BuiltinTests extends TestSuite{ n0: Int = 2 """) } - test("firstFrameNotFrozen"){ + test("firstFrameNotFrozen") { check.session(""" @ 2 res0: Int = 2 diff --git a/amm/repl/src/test/scala/ammonite/session/EulerTests.scala b/amm/repl/src/test/scala/ammonite/session/EulerTests.scala 
index e6b15c061..1a6d32a91 100644 --- a/amm/repl/src/test/scala/ammonite/session/EulerTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/EulerTests.scala @@ -2,22 +2,21 @@ package ammonite.session import ammonite.DualTestRepl import utest._ -object EulerTests extends TestSuite{ - val tests = Tests{ +object EulerTests extends TestSuite { + val tests = Tests { println("EulerTests") val check = new DualTestRepl() - // Taken from https://pavelfatin.com/scala-for-project-euler/ // Thanks Pavel! - test("p1"){ + test("p1") { // Add all the natural numbers below one thousand that are multiples of 3 or 5.* check.session(""" @ val r = (1 until 1000).view.filter(n => n % 3 == 0 || n % 5 == 0).sum r: Int = 233168 """) } - test("p2"){ + test("p2") { // Find the sum of all the even-valued terms in the // Fibonacci sequence which do not exceed four million.* check.session(""" @@ -28,7 +27,7 @@ object EulerTests extends TestSuite{ r: Int = 4613732 """) } - test("p3"){ + test("p3") { // Find the largest prime factor of a composite number.* check.session(""" @ def factors(n: Long): List[Long] = { @@ -40,10 +39,9 @@ object EulerTests extends TestSuite{ @ factors(600851475143L).last res1: Long = 6857L - """ - ) + """) } - test("p4"){ + test("p4") { // Find the largest palindrome made from the product of two 3-digit numbers.* // Doesn't work due to RangePosition problem check.session(""" @@ -56,7 +54,7 @@ object EulerTests extends TestSuite{ res0: Int = 906609 """) } - test("p5"){ + test("p5") { // What is the smallest number divisible by each of the numbers 1 to 10?* // Original value was 20, but 10 runs a lot quicker check.session(""" @@ -64,7 +62,7 @@ object EulerTests extends TestSuite{ res0: Int = 2520 """) } - test("p6"){ + test("p6") { // What is the difference between the sum of the squares and the // square of the sums?* check.session(""" @@ -77,7 +75,7 @@ object EulerTests extends TestSuite{ res2: Int = 25164150 """) } - test("p7"){ + test("p7") { // Find the 10001st prime.* check.session(""" @ lazy val ps: Stream[Int] = 2 #:: Stream.from(3).filter(i => @@ -88,7 +86,7 @@ object EulerTests extends TestSuite{ res1: Int = 104743 """) } - test("p8"){ + test("p8") { // Discover the largest product of five consecutive digits in the 1000-digit number.* val data = """73167176531330624919225119674426574742355349194934 @@ -124,7 +122,7 @@ object EulerTests extends TestSuite{ res1: Int = 40824 """) } - test("p9"){ + test("p9") { // Find the only Pythagorean triplet, {a, b, c}, for which a + b + c = 1000.* check.session(""" @ val limit = (1 to 1000).find(n => n + math.sqrt(n) >= 1000).get @@ -142,7 +140,7 @@ object EulerTests extends TestSuite{ rs: IndexedSeq[Int] = Vector(31875000) """) } - test("p10"){ + test("p10") { // Calculate the sum of all the primes below 200,000.* // Originally 2,000,000, reduced to 200,000 for perf check.session(""" @@ -152,10 +150,9 @@ object EulerTests extends TestSuite{ @ ps.view.takeWhile(_ < 200000).foldLeft(0L)(_ + _) res1: Long = 1709600813L - """ - ) + """) } - test("p11"){ + test("p11") { // What is the greatest product of four numbers // on the same straight line in the 20 by 20 grid?* val data = @@ -203,7 +200,7 @@ object EulerTests extends TestSuite{ res5: Int = 70600674 """) } - test("p12"){ + test("p12") { // What is the value of the first triangle number to have over five hundred divisors?* check.session(""" @ lazy val ts: Stream[Int] = 0 #:: ts.zipWithIndex.map(p => p._1 + p._2 + 1) @@ -220,7 +217,7 @@ object EulerTests extends TestSuite{ res2: Int = 76576500 
""") } - test("p13"){ + test("p13") { // Find the first ten digits of the sum of one-hundred 50-digit numbers.* val data = """37107287533902102798797998220837590246510135740250 |46376937677490009712648124896970078050417018260538 @@ -330,7 +327,7 @@ object EulerTests extends TestSuite{ """) } - test("p14"){ + test("p14") { // Find the longest sequence using a starting number under one million.* check.session(""" @ def from(n: Long, c: Int = 0): Int = { @@ -347,7 +344,7 @@ object EulerTests extends TestSuite{ """) } - test("p15"){ + test("p15") { // Starting in the top left corner in a 20 by 20 grid, // how many routes are there to the bottom right corner?* check.session(""" @@ -363,14 +360,14 @@ object EulerTests extends TestSuite{ res2: Long = 137846528820L """) } - test("p16"){ + test("p16") { // What is the sum of the digits of the number 2^1000?* check.session(""" @ BigInt(2).pow(1000).toString.view.map(_.asDigit).sum res0: Int = 1366 """) } - test("p17"){ + test("p17") { // How many letters would be needed to write all // the numbers in words from 1 to 1000?* check.session(""" @@ -394,8 +391,7 @@ object EulerTests extends TestSuite{ """) } - - test("p18"){ + test("p18") { // Find the maximum sum travelling from the top of the triangle to the base.* val data = """75 |95 64 @@ -429,8 +425,8 @@ object EulerTests extends TestSuite{ @ assert(r == 1074) // 2 ms """) } - test("p19"){ - //How many Sundays fell on the first of the month during the twentieth century?* + test("p19") { + // How many Sundays fell on the first of the month during the twentieth century?* check.session(""" @ val lengths = Array(31, 0, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31) @@ -447,7 +443,7 @@ object EulerTests extends TestSuite{ r: Int = 171 """) } - test("p20"){ + test("p20") { // Find the sum of digits in 100!* check.session(""" @ def f(n: BigInt): BigInt = if(n < 2) 1 else n * f(n - 1) @@ -456,7 +452,7 @@ object EulerTests extends TestSuite{ r: Int = 648 """) } - test("p21"){ + test("p21") { // Evaluate the sum of all amicable pairs under 1000.* // Originally 10000, shorted to 1000 for performance check.session(""" @@ -472,7 +468,7 @@ object EulerTests extends TestSuite{ r: Int = 504 """) } - test("p22"){ + test("p22") { // Smaller than the real dataset, because the real dataset // is too big to copy & paste into the unit test code. 
val data = "\"\"\"" + """ @@ -547,7 +543,7 @@ object EulerTests extends TestSuite{ // """) // } - test("p24"){ + test("p24") { // What is the thousanth lexicographic permutation of the digits // 0, 1, 2, 3, 4, 5, 6, 7, 8 and 9?* check.session(""" @@ -560,7 +556,7 @@ object EulerTests extends TestSuite{ r: Long = 124658793L """) } - test("p25"){ + test("p25") { // What is the first term in the Fibonacci sequence to contain 1000 digits?* check.session(""" @ lazy val fs: Stream[BigInt] = { @@ -572,7 +568,7 @@ object EulerTests extends TestSuite{ """) } - test("p26"){ + test("p26") { // Find the value of d < 100 for which 1/d contains the longest recurring cycle.* // Originally 1000, reduced to 100 for perf check.session(""" @@ -585,7 +581,7 @@ object EulerTests extends TestSuite{ r: Int = 97 """) } - test("p27"){ + test("p27") { // Find a quadratic formula that produces the maximum number of // primes for consecutive values of n.* // Originally -999 to 1000, reduced to -99 to 100 to speed it up a bit @@ -605,7 +601,7 @@ object EulerTests extends TestSuite{ """) } - test("p28"){ + test("p28") { // What is the sum of both diagonals in a 1001 by 1001 spiral?* check.session(""" @ def cs(n: Int, p: Int): Stream[Int] = @@ -616,7 +612,7 @@ object EulerTests extends TestSuite{ """) } - test("p29"){ + test("p29") { // How many distinct terms are in the sequence generated by ab // for 2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?* check.session(""" @@ -630,7 +626,7 @@ object EulerTests extends TestSuite{ """) } - test("p30"){ + test("p30") { // Find the sum of all the numbers that can be written as // the sum of fifth powers of their digits.* check.session(""" @@ -644,7 +640,7 @@ object EulerTests extends TestSuite{ r: Int = 443839 """) } - test("p31"){ + test("p31") { // Investigating combinations of English currency denominations.* check.session(""" @ def f(ms: List[Int], n: Int): Int = ms match { @@ -657,7 +653,7 @@ object EulerTests extends TestSuite{ r: Int = 73682 """) } - test("p32"){ + test("p32") { // Find the sum of all numbers that can be written as pandigital products.* check.session(""" @ val ms = for { diff --git a/amm/repl/src/test/scala/ammonite/session/EvaluatorTests.scala b/amm/repl/src/test/scala/ammonite/session/EvaluatorTests.scala index 093cb43b8..62da18399 100644 --- a/amm/repl/src/test/scala/ammonite/session/EvaluatorTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/EvaluatorTests.scala @@ -6,12 +6,12 @@ import utest._ import scala.collection.{immutable => imm} -object EvaluatorTests extends TestSuite{ +object EvaluatorTests extends TestSuite { - val tests = Tests{ + val tests = Tests { println("EvaluatorTests") val check = new DualTestRepl() - test("simpleExpressions"){ + test("simpleExpressions") { check.session(""" @ 1 + 2 res0: Int = 3 @@ -23,7 +23,7 @@ object EvaluatorTests extends TestSuite{ res2: Int = 6 """) } - test("vals"){ + test("vals") { check.session(""" @ val x = 10L x: Long = 10L @@ -44,7 +44,7 @@ object EvaluatorTests extends TestSuite{ res5: String = "class" """) } - test("lazyvals"){ + test("lazyvals") { // It actually appears when I ask for it, and the // actual evaluation happens in the correct order check.session(""" @@ -74,7 +74,7 @@ object EvaluatorTests extends TestSuite{ """) } - test("vars"){ + test("vars") { check.session(""" @ var x: Int = 10 x: Int = 10 @@ -89,7 +89,7 @@ object EvaluatorTests extends TestSuite{ """) } - test("defs"){ + test("defs") { check.session(""" @ def sumItAll[T: Numeric](i: Seq[T]): T = {i.sum} defined function sumItAll @@ -101,7 
+101,7 @@ object EvaluatorTests extends TestSuite{ res2: Long = 15L """) } - test("types"){ + test("types") { val aliasedType = if (check.scala2) "Funky" else "Array[Array[String]]" val aliasedType2 = @@ -120,7 +120,7 @@ object EvaluatorTests extends TestSuite{ arr: $aliasedType2 = Array(Array(123)) """) } - test("library"){ + test("library") { // x and y pprinted value is test("non") - empty iterator' up to 2.12.6, // '' since 2.12.7, hence the '?' (don't check the value) check.session(""" @@ -135,7 +135,7 @@ object EvaluatorTests extends TestSuite{ """) } - test("classes"){ + test("classes") { val objWrapperPrefix = if (check.scala2) "" else "ammonite.$sess.cmd2." val classWrapperCmd3Prefix = @@ -188,7 +188,7 @@ object EvaluatorTests extends TestSuite{ // CO != res3 should test roughly the same thing } - test("packageImport"){ + test("packageImport") { check.session(""" @ import java.util._ @@ -196,7 +196,7 @@ object EvaluatorTests extends TestSuite{ """) } - test("nesting"){ + test("nesting") { check.session(""" @ val x = 1 x: Int = 1 @@ -217,7 +217,7 @@ object EvaluatorTests extends TestSuite{ res5: Int = 2 """) } - test("nestedImports"){ + test("nestedImports") { check.session(""" @ class Foo { object values { def a = "a" }; override def toString = "Foo" } defined class Foo @@ -230,7 +230,7 @@ object EvaluatorTests extends TestSuite{ a0: String = "a" """) } - test("multistatement"){ + test("multistatement") { check.session(s""" @ ;1; 2L; '3'; res0_0: Int = 1 @@ -255,7 +255,7 @@ object EvaluatorTests extends TestSuite{ """) } - test("multiassign"){ + test("multiassign") { check.session(""" @ val (a, b) = (1, 2) a: Int = 1 @@ -271,7 +271,7 @@ object EvaluatorTests extends TestSuite{ d: Int = 4 """) } - test("parsingProblems"){ + test("parsingProblems") { check.session(""" @ (1 + 1) res0: Int = 2 @@ -293,7 +293,7 @@ object EvaluatorTests extends TestSuite{ res3: Seq[Int] = List(1) """) } - test("backticks"){ + test("backticks") { check.session(""" @ val `1+1` = 1 `1+1`: Int = 1 @@ -364,7 +364,7 @@ object EvaluatorTests extends TestSuite{ """) } - test("referenceTraitFromPreviousCommand"){ + test("referenceTraitFromPreviousCommand") { // When class-based wrapping is enabled, the second command references a trait from the // first one, and this case has to be handled by the "used earlier definitions" mechanism. 
check.session(""" diff --git a/amm/repl/src/test/scala/ammonite/session/FailureTests.scala b/amm/repl/src/test/scala/ammonite/session/FailureTests.scala index d8d8b1c0c..4b2715d41 100644 --- a/amm/repl/src/test/scala/ammonite/session/FailureTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/FailureTests.scala @@ -4,11 +4,11 @@ import ammonite.{DualTestRepl, TestUtils} import utest._ import scala.collection.{immutable => imm} -object FailureTests extends TestSuite{ - val tests = Tests{ +object FailureTests extends TestSuite { + val tests = Tests { println("FailureTests") val check = new DualTestRepl() - test("compileFailure"){ + test("compileFailure") { if (check.scala2) check.session(s""" @ doesnt_exist @@ -46,7 +46,7 @@ object FailureTests extends TestSuite{ Compilation Failed """) } - test("compilerCrash"){ + test("compilerCrash") { // Make sure compiler crashes provide the appropriate error // messaging, and the REPL continues functioning after if (TestUtils.scala2_11) check.session(""" @@ -60,21 +60,23 @@ object FailureTests extends TestSuite{ res1: Int = 2 """) } - test("ivyFail"){ + test("ivyFail") { check.session(""" @ import $ivy.`com.lihaoyi::upickle-doest-exist:0.1.0` error: Failed to resolve ivy dependencies """) } - test("exceptionHandling"){ - check.fail("""throw new Exception("lol", new Exception("hoho"))""", x => - // It contains the things we want - x.contains("java.lang.Exception: lol") && - x.contains("java.lang.Exception: hoho") && - // and none of the stuff we don't want - !x.contains("evaluatorRunPrinter") && - !x.contains("Something unexpected went wrong =(") + test("exceptionHandling") { + check.fail( + """throw new Exception("lol", new Exception("hoho"))""", + x => + // It contains the things we want + x.contains("java.lang.Exception: lol") && + x.contains("java.lang.Exception: hoho") && + // and none of the stuff we don't want + !x.contains("evaluatorRunPrinter") && + !x.contains("Something unexpected went wrong =(") ) } @@ -85,9 +87,10 @@ object FailureTests extends TestSuite{ | | | 1 / 0 - |""".stripMargin, x => + |""".stripMargin, + x => x.contains("/ by zero") && - x.contains("cmd0.sc:4") // check that the line number is correct + x.contains("cmd0.sc:4") // check that the line number is correct ) } @@ -96,15 +99,16 @@ object FailureTests extends TestSuite{ test("lineNumbersInStackTrace2") { if (check.scala2) { check.fail( - """ - |{ - | - | // block command - | 1 / 0 - |} - |""".stripMargin, x => - x.contains("/ by zero") && - x.contains("cmd0.sc:5") // check that the line number is correct + """ + |{ + | + | // block command + | 1 / 0 + |} + |""".stripMargin, + x => + x.contains("/ by zero") && + x.contains("cmd0.sc:5") // check that the line number is correct ) } } @@ -122,8 +126,8 @@ object FailureTests extends TestSuite{ |""".stripMargin, x => x.contains("/ by zero") && - !x.contains("cmd0.sc:") && - x.contains("cell0.sc:") + !x.contains("cmd0.sc:") && + x.contains("cell0.sc:") ) } } diff --git a/amm/repl/src/test/scala/ammonite/session/ImportHookTests.scala b/amm/repl/src/test/scala/ammonite/session/ImportHookTests.scala index ee1811d4b..9c45f412a 100644 --- a/amm/repl/src/test/scala/ammonite/session/ImportHookTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/ImportHookTests.scala @@ -9,13 +9,13 @@ import scala.collection.{immutable => imm} import scala.util.Properties import ammonite.util.Util -object ImportHookTests extends TestSuite{ +object ImportHookTests extends TestSuite { - val tests = Tests{ + val tests = Tests { 
println("ImportHookTests") val check = new DualTestRepl() - test("repl"){ - test("file"){ + test("repl") { + test("file") { test("basic") - check.session(""" @ import $file.amm.src.test.resources.importHooks.Basic @@ -68,15 +68,14 @@ object ImportHookTests extends TestSuite{ error: Cannot resolve $file import """) - test("deepRenamed") - check.session(""" @ import $file.amm.src.test.resources.importHooks.Deep.{DeepObject => DeepRenamed} error: Cannot resolve $file import """) } - test("ivy"){ - test("basic"){ + test("ivy") { + test("basic") { check.session(s""" @ import scalatags.Text.all._ error: ${check.notFound("scalatags")} @@ -90,7 +89,7 @@ object ImportHookTests extends TestSuite{ """) } - test("explicitBinaryVersion"){ + test("explicitBinaryVersion") { val sbv = IvyConstructor.scalaBinaryVersion(check.scalaVersion) check.session(s""" @ import scalatags.Text.all._ @@ -105,7 +104,7 @@ object ImportHookTests extends TestSuite{ """) } - test("inline"){ + test("inline") { check.session(s""" @ import scalatags.Text.all._ error: ${check.notFound("scalatags")} @@ -117,7 +116,7 @@ object ImportHookTests extends TestSuite{ """) } - test("inlineFull"){ + test("inlineFull") { // no more macroparadise in 2.13 if (scala2_12 && scala.util.Properties.versionNumberString != "2.12.10") { check.session(""" @@ -132,11 +131,11 @@ object ImportHookTests extends TestSuite{ } } } - test("url"){ + test("url") { val scriptUrl = "https://raw.githubusercontent.com/lihaoyi/Ammonite/" + - "master/amm/src/test/resources/scripts/Annotation.sc" - test("basic"){ + "master/amm/src/test/resources/scripts/Annotation.sc" + test("basic") { check.session(s""" @ import $$url.`$scriptUrl` error: $$url import failed @@ -147,7 +146,7 @@ object ImportHookTests extends TestSuite{ res1: Int = 24 """) } - test("inline"){ + test("inline") { check.session(s""" @ import $$url.`$scriptUrl` error: $$url import failed @@ -160,7 +159,7 @@ object ImportHookTests extends TestSuite{ } } } - test("scripts"){ + test("scripts") { test("file") - check.session(""" @ import $file.amm.src.test.resources.importHooks.FileImport @@ -175,7 +174,7 @@ object ImportHookTests extends TestSuite{ res1: Int = 31339 """) - test("ivy"){ + test("ivy") { check.session(""" @ import $file.amm.src.test.resources.importHooks.IvyImport diff --git a/amm/repl/src/test/scala/ammonite/session/ImportTests.scala b/amm/repl/src/test/scala/ammonite/session/ImportTests.scala index 2c103622b..a4b5e4f07 100644 --- a/amm/repl/src/test/scala/ammonite/session/ImportTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/ImportTests.scala @@ -6,14 +6,14 @@ import utest._ import scala.collection.{immutable => imm} -object ImportTests extends TestSuite{ +object ImportTests extends TestSuite { - val tests = Tests{ + val tests = Tests { println("ImportTests") val check = new DualTestRepl() - test("basic"){ - test("hello"){ + test("basic") { + test("hello") { check.session(""" @ import math.abs import math.abs @@ -27,7 +27,7 @@ object ImportTests extends TestSuite{ } - test("java"){ + test("java") { check.session(""" @ import Thread._ import Thread._ @@ -45,7 +45,7 @@ object ImportTests extends TestSuite{ res4: Boolean = true """) } - test("multi"){ + test("multi") { check.session(""" @ import math._, Thread._ import math._, Thread._ @@ -58,7 +58,7 @@ object ImportTests extends TestSuite{ """) } - test("renaming"){ + test("renaming") { check.session(s""" @ import math.{abs => sba} @@ -86,8 +86,8 @@ object ImportTests extends TestSuite{ """) } } - test("shadowing"){ - 
test("sameName"){ + test("shadowing") { + test("sameName") { check.session(""" @ val abs = 'a' abs: Char = 'a' @@ -120,8 +120,8 @@ object ImportTests extends TestSuite{ res9: Int = 4 """) } - test("shadowPrefix"){ - test{ + test("shadowPrefix") { + test { // fixed in 2.11 and 2.12 check.session(raw""" @ object importing_issue { @@ -163,7 +163,7 @@ object ImportTests extends TestSuite{ } - test("typeTermSeparation"){ + test("typeTermSeparation") { // Make sure that you can have a term and a type of the same name // coming from different places and they don't stomp over each other // (#199) and both are accessible. @@ -181,7 +181,7 @@ object ImportTests extends TestSuite{ res3: $aliasedType = 2 """) - test{ + test { val aliasedType = if (check.scala2) "Order" else "Seq" check.session(s""" @@ -232,7 +232,7 @@ object ImportTests extends TestSuite{ error: Compilation Failed """) // Prefix things properly in Scala-2.10 where the type printer is dumb - test("paulp"){ + test("paulp") { if (!check.scala2) "disabled in Scala 3" else check.session(s""" @@ -305,7 +305,7 @@ object ImportTests extends TestSuite{ @ Paulp """) } - test("paulpTypeRegression"){ + test("paulpTypeRegression") { if (!check.scala2) "disabled in Scala 3" else check.session(s""" @ type Paulp = Int @@ -318,7 +318,7 @@ object ImportTests extends TestSuite{ } } } - test("collapsing"){ + test("collapsing") { check.session(""" @ object Foo{ val bar = 1 } @@ -330,7 +330,7 @@ object ImportTests extends TestSuite{ res3: Int = 1 """) } - test("shapelessBugMinimized"){ + test("shapelessBugMinimized") { check.session(""" @ object f{ case class Foo()} @@ -341,7 +341,7 @@ object ImportTests extends TestSuite{ @ Foo """) } - test("shapelessBugFull"){ + test("shapelessBugFull") { if (check.scala2) check.session(""" @ import $ivy.`com.chuusai::shapeless:2.3.7`, shapeless.:: diff --git a/amm/repl/src/test/scala/ammonite/session/ProjectTests.scala b/amm/repl/src/test/scala/ammonite/session/ProjectTests.scala index beaa5dd58..ebce3105a 100644 --- a/amm/repl/src/test/scala/ammonite/session/ProjectTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/ProjectTests.scala @@ -5,17 +5,17 @@ import ammonite.TestUtils._ import utest._ import ammonite.interp.api.IvyConstructor -object ProjectTests extends TestSuite{ - val tests = Tests{ +object ProjectTests extends TestSuite { + val tests = Tests { println("ProjectTests") val check = new DualTestRepl() - test("load"){ - test("ivy"){ - test("standalone"){ - // ivy or maven central are flaky =/ - val tq = "\"\"\"" - check.session( - s""" + test("load") { + test("ivy") { + test("standalone") { + // ivy or maven central are flaky =/ + val tq = "\"\"\"" + check.session( + s""" @ import scalatags.Text.all._ error: ${check.notFound("scalatags")} @@ -26,11 +26,12 @@ object ProjectTests extends TestSuite{ @ a("omg", href:="www.google.com").render res2: String = "omg" - """) + """ + ) } - test("akkahttp"){ - if (scala2_11) check.session( - """ + test("akkahttp") { + if (scala2_11) check.session( + """ @ import $ivy.`com.typesafe.akka::akka-http-experimental:1.0-M3` @ implicit val system = akka.actor.ActorSystem() @@ -62,9 +63,10 @@ object ProjectTests extends TestSuite{ res10: Boolean = true @ system.shutdown() - """) + """ + ) } - test("resolvers"){ + test("resolvers") { check.session(""" @ import $ivy.`com.github.jupyter:jvm-repr:0.4.0` error: Failed to resolve ivy dependencies @@ -78,7 +80,7 @@ object ProjectTests extends TestSuite{ @ import jupyter._ """) } - test("resolversStatic"){ + 
test("resolversStatic") { check.session(""" @ import $repo.`https://jitpack.io` @@ -88,7 +90,7 @@ object ProjectTests extends TestSuite{ """) } } - test("code"){ + test("code") { check.session(""" @ repl.load("val x = 1") @@ -98,7 +100,7 @@ object ProjectTests extends TestSuite{ } } - test("shapeless"){ + test("shapeless") { if (check.scala2) check.session(""" @ import $ivy.`com.chuusai::shapeless:2.3.3`, shapeless._ @@ -118,7 +120,7 @@ object ProjectTests extends TestSuite{ "Disabled in Scala 3" } - test("scalaz"){ + test("scalaz") { check.session(s""" @ import $$ivy.`org.scalaz::scalaz-core:7.2.27 compat`, scalaz._, Scalaz._ @@ -126,13 +128,13 @@ object ProjectTests extends TestSuite{ res1: Option[Int] = ${Print.Some(value = 3)} """) } - test("cats"){ + test("cats") { check.session(""" @ import $ivy.`org.typelevel::cats-core:2.0.0-M4 compat`, cats._ """) } - test("guava"){ + test("guava") { check.session(""" @ import $ivy.`com.google.guava:guava:18.0`, com.google.common.collect._ @@ -145,7 +147,7 @@ object ProjectTests extends TestSuite{ res3: Int = 2 """) } - test("resources"){ + test("resources") { // Disabled in Scala 3 for now. Getting weird typing errors, like // exception while typing ammonite.ops.Callable1Implicit… // java.lang.AssertionError: assertion failed: @@ -164,7 +166,7 @@ object ProjectTests extends TestSuite{ @ os.read(path) // Should work now """) } - test("scalaparse"){ + test("scalaparse") { // For some reason this blows up in 2.11.x // Prevent regressions when wildcard-importing things called `macro` or `_` if (scala2_12) check.session(s""" @@ -181,7 +183,7 @@ object ProjectTests extends TestSuite{ """) } - test("finagle"){ + test("finagle") { // Prevent regressions when wildcard-importing things called `macro` or `_` check.session(""" @ import $ivy.`com.twitter::finagle-http:21.4.0 compat` @@ -226,9 +228,9 @@ object ProjectTests extends TestSuite{ @ server.close() """) } - test("spire"){ + test("spire") { // Prevent regressions when wildcard-importing things called `macro` or `_` - //buggy in 2.10, spire not yet published for 2.12 + // buggy in 2.10, spire not yet published for 2.12 if (scala2_11) check.session(s""" @ import $$ivy.`org.spire-math::spire:0.11.0` @@ -262,8 +264,8 @@ object ProjectTests extends TestSuite{ @ mean(Rational(1, 2), Rational(3, 2), Rational(0)) res10: Rational = 2/3 """) - } - test("pegdown"){ + } + test("pegdown") { val expectedType = // probably a TPrint bug in Scala 3… if (check.scala2) "org.pegdown.ast.SimpleNode.Type" else "Type" @@ -273,10 +275,11 @@ object ProjectTests extends TestSuite{ @ org.pegdown.ast.SimpleNode.Type.HRule res1: $expectedType = HRule - """) + """ + ) } - test("deeplearning"){ + test("deeplearning") { // DeepLearning.scala 2.0.0-RC0 isn't published for scala 2.13 if (scala2_12) check.session( """ @@ -300,7 +303,7 @@ object ProjectTests extends TestSuite{ ) } - test("jadb"){ + test("jadb") { // tests for jitpack and optional dependencies check.session( """ @@ -351,7 +354,7 @@ object ProjectTests extends TestSuite{ ) } - test("profiles"){ + test("profiles") { val testCore = """ @ import $ivy.`org.apache.spark::spark-sql:2.4.3` @@ -372,7 +375,7 @@ object ProjectTests extends TestSuite{ @ .openStream() @ ) """ - test("default"){ + test("default") { // should load hadoop 2.6 stuff by default if (scala2_12) check.session( @@ -384,7 +387,7 @@ object ProjectTests extends TestSuite{ """ ) } - test("withProfile"){ + test("withProfile") { // with the right profile, should load hadoop 3.1 stuff if (scala2_12) check.session( 
@@ -405,7 +408,7 @@ object ProjectTests extends TestSuite{ ) } } - test("onlyJarLikeArtifactTypes"){ + test("onlyJarLikeArtifactTypes") { check.session( """ @ import $ivy.`log4j:log4j:1.2.17` @@ -426,7 +429,7 @@ object ProjectTests extends TestSuite{ ) } - test("no sources"){ + test("no sources") { val sbv = IvyConstructor.scalaBinaryVersion(ammonite.compiler.CompilerBuilder.scalaVersion) val core = @@ -445,7 +448,7 @@ object ProjectTests extends TestSuite{ found: Boolean = true """ - test("default"){ + test("default") { check.session( s""" $core @@ -456,7 +459,7 @@ object ProjectTests extends TestSuite{ ) } - test("disabled"){ + test("disabled") { check.session( s""" @ interp.resolutionHooks += { fetch => @@ -473,7 +476,7 @@ object ProjectTests extends TestSuite{ } } - test("extra artifact types"){ + test("extra artifact types") { val core = """ @ import $ivy.`com.almworks.sqlite4java:libsqlite4java-linux-amd64:1.0.392` @@ -490,7 +493,7 @@ object ProjectTests extends TestSuite{ defined function soFound """ - test("default"){ + test("default") { check.session( s""" $core @@ -501,7 +504,7 @@ object ProjectTests extends TestSuite{ ) } - test("with so"){ + test("with so") { check.session( s""" @ interp.resolutionHooks += { fetch => @@ -519,4 +522,3 @@ object ProjectTests extends TestSuite{ } } - diff --git a/amm/repl/src/test/scala/ammonite/session/SerializationTests.scala b/amm/repl/src/test/scala/ammonite/session/SerializationTests.scala index bd614b807..08f58c268 100644 --- a/amm/repl/src/test/scala/ammonite/session/SerializationTests.scala +++ b/amm/repl/src/test/scala/ammonite/session/SerializationTests.scala @@ -5,15 +5,15 @@ import ammonite.TestUtils import ammonite.compiler.CodeClassWrapper import utest._ -object SerializationTests extends TestSuite{ +object SerializationTests extends TestSuite { val tests = if (TestUtils.scala2) scala2Tests else scala3Tests - def scala2Tests = Tests{ + def scala2Tests = Tests { println("SerializationTests") val check = new TestRepl { override def codeWrapper = CodeClassWrapper } - test("closure"){ + test("closure") { // User values from the REPL shouldn't be recomputed upon // deserialization. 
The test below checks that the value `a`, whose // computation is side-effecting, is indeed serialized along `closure`, @@ -92,7 +92,8 @@ object SerializationTests extends TestSuite{ @ costlySideEffect.absent("after deserialization") @ }} - """) + """ + ) } } def scala3Tests = Tests { diff --git a/amm/repl/src/test/scala/ammonite/testcode/PaulpImports.scala b/amm/repl/src/test/scala/ammonite/testcode/PaulpImports.scala index 252af1879..60f2699a8 100644 --- a/amm/repl/src/test/scala/ammonite/testcode/PaulpImports.scala +++ b/amm/repl/src/test/scala/ammonite/testcode/PaulpImports.scala @@ -1,7 +1,7 @@ package ammonite.testcode package paulp1 { - class Paulp{ + class Paulp { override def toString = "paulp1.Paulp1" } } @@ -12,13 +12,11 @@ package paulp2 { } } - package paulp3 { object Paulp { override def toString = "paulp3.Paulp-object" } - class Paulp{ + class Paulp { override def toString = "paulp3.Paulp-class" } } - diff --git a/amm/repl/src/test/scala/ammonite/unit/ClipboardTests.scala b/amm/repl/src/test/scala/ammonite/unit/ClipboardTests.scala index 61fe6c091..d5dc1cbb8 100644 --- a/amm/repl/src/test/scala/ammonite/unit/ClipboardTests.scala +++ b/amm/repl/src/test/scala/ammonite/unit/ClipboardTests.scala @@ -10,16 +10,16 @@ import utest._ import scala.util.Try -object ClipboardTests extends TestSuite{ +object ClipboardTests extends TestSuite { val clipboard: Clipboard = ClipboardImpl /** - * This test suite requires an environment with access to a window - * service (either under, Windows, MacOs, X11, ...) CI environment - * doesn't satisfy that condition and that is detected in the following - * check in order to skip [[Clipboard]] test when running on CI. - */ + * This test suite requires an environment with access to a window + * service (either under, Windows, MacOs, X11, ...) CI environment + * doesn't satisfy that condition and that is detected in the following + * check in order to skip [[Clipboard]] test when running on CI. 
+ */ val canTest = Try( Toolkit.getDefaultToolkit.getSystemClipboard.isDataFlavorAvailable( DataFlavor.stringFlavor @@ -28,9 +28,9 @@ object ClipboardTests extends TestSuite{ override def tests = Tests { println("ClipboardTests") - test("clipboard"){ + test("clipboard") { val newClipboardContents = "hello Ammonite" - test("copyandpaste"){ + test("copyandpaste") { if (canTest) { clipboard.write(newClipboardContents) assert(clipboard.read == newClipboardContents) diff --git a/amm/repl/src/test/scala/ammonite/unit/HighlightTests.scala b/amm/repl/src/test/scala/ammonite/unit/HighlightTests.scala index 43d46d938..2f6dd4b21 100644 --- a/amm/repl/src/test/scala/ammonite/unit/HighlightTests.scala +++ b/amm/repl/src/test/scala/ammonite/unit/HighlightTests.scala @@ -4,7 +4,7 @@ import ammonite.compiler.CompilerBuilder.scalaVersion import ammonite.compiler.Parsers import utest._ -object HighlightTests extends TestSuite{ +object HighlightTests extends TestSuite { def testHighlight(buffer: Vector[Char]) = Parsers.defaultHighlight( buffer, @@ -30,12 +30,11 @@ object HighlightTests extends TestSuite{ val isScala3_01 = scalaVersion.startsWith("3.0.") || scalaVersion.startsWith("3.1.") val tests = Tests { println("HighlightTests") - test("highlighting"){ - test("fuzz"){ - + test("highlighting") { + test("fuzz") { val paths = os.walk(os.pwd).filter(_.ext == "scala") - for(path <- paths){ + for (path <- paths) { val code = os.read(path) val out = Parsers.defaultHighlight( code.toVector, @@ -51,37 +50,40 @@ object HighlightTests extends TestSuite{ val outLength = out.length val codeLength = code.length - assert{identity(path); outLength == codeLength} - assert{identity(path); out == code} + assert { identity(path); outLength == codeLength } + assert { identity(path); out == code } } paths.length } - test("comment")- check("//a", "") - test("type")- { + test("comment") - check("//a", "") + test("type") - { val expected = if (isScala2) "x: ." 
else "x: " check("x: y.type", expected) } - test("literal")- check("1", "") - test("expressions")- check("val (x, y) = 1 + 2 + 3", " (x, y) = + + ") - test("interpolation")- check( + test("literal") - check("1", "") + test("expressions") - check( + "val (x, y) = 1 + 2 + 3", + " (x, y) = + + " + ) + test("interpolation") - check( """(s"hello ${world + 1}")""", """(${world + })""" ) - test("runOff")- { + test("runOff") - { val expected = if (isScala2) """( + """ else """( + "Hello...""" // Dotty highlighter doesn't color open strings check("""(1 + "Hello...""", expected) } - test("underscore")- { + test("underscore") - { val expected = if (isScala2) """ = """ else """ _ = """ check("""val _ = 1""", expected) } - test("nonTrivial")- { + test("nonTrivial") - { val underscore = if (isScala2) "" else "_" @@ -113,7 +115,7 @@ object HighlightTests extends TestSuite{ } finally Thread.currentThread().setContextClassLoader(oldClassloader) } yield out """, - s""" processLine(stmts: ${stringColl("Seq")}, + s""" processLine(stmts: ${stringColl("Seq")}, saveHistory: ( => , ) => , printer: ${stringColl("Iterator")} => ) = { $underscore <- Catching { Ex(x@$underscore0) => diff --git a/amm/repl/src/test/scala/ammonite/unit/ParserTests.scala b/amm/repl/src/test/scala/ammonite/unit/ParserTests.scala index 940ce198b..75c142b63 100644 --- a/amm/repl/src/test/scala/ammonite/unit/ParserTests.scala +++ b/amm/repl/src/test/scala/ammonite/unit/ParserTests.scala @@ -3,11 +3,11 @@ package ammonite.unit import ammonite.util.Util import utest._ -object ParserTests extends TestSuite{ +object ParserTests extends TestSuite { val tests = Tests { println("ParserTests") - test("shebang"){ + test("shebang") { def check(original: String, expected: String) = { val skipped = ammonite.interp.Interpreter.skipSheBangLine( ammonite.util.Util.normalizeNewlines(original) @@ -66,7 +66,7 @@ object ParserTests extends TestSuite{ // // Not nearly comprehensive, but hopefully if someone really borks this // somewhat-subtle logic around the parsers, one of these will catch it - test("endOfCommandDetection"){ + test("endOfCommandDetection") { def assertResult(x: String, pred: Option[Either[String, _]] => Boolean) = { val res = ammonite.compiler.Parsers.split(x) assert(pred(res)) @@ -76,7 +76,7 @@ object ParserTests extends TestSuite{ def assertInvalid(x: String) = assertResult(x, res => res.isDefined && res.get.isLeft) - test("endOfCommand"){ + test("endOfCommand") { test - assertComplete("{}") test - assertComplete("foo.bar") test - assertComplete("foo.bar // line comment") @@ -86,7 +86,9 @@ object ParserTests extends TestSuite{ // I hope that one not passing doesn't have unintended consequences… // Once we can use coursier/interface#256 here, use it to compare versions // if (coursierapi.Version.compare("3.1.3", sv) <= 0) - if (sv.startsWith("2.") || sv.startsWith("3.0.") || (sv.startsWith("3.1.") && sv != "3.1.3")) + if ( + sv.startsWith("2.") || sv.startsWith("3.0.") || (sv.startsWith("3.1.") && sv != "3.1.3") + ) assertComplete("va va") // postfix else "Disabled" } @@ -105,7 +107,7 @@ object ParserTests extends TestSuite{ val r = (1 until 1000).view.filter(n => n % 3 == 0 || n % 5 == 0).sum """) } - test("notEndOfCommand"){ + test("notEndOfCommand") { test - assertIncomplete("{") test - assertIncomplete("foo.bar /* incomplete block comment") @@ -115,9 +117,9 @@ object ParserTests extends TestSuite{ test - assertIncomplete(""" val r = (1 until 1000).view.filter(n => n % 3 == 0 || n % 5 == 0 """) - + } - test("commandIsBroken"){ + 
test("commandIsBroken") { test - assertInvalid("}") test - assertInvalid("{val val ") test - assertInvalid("val val ") @@ -132,8 +134,8 @@ object ParserTests extends TestSuite{ val r <- (1 until 1000).view.filter(n => n % 3 == 0 || n % 5 == 0 """) } - test("examples"){ - test("small"){ + test("examples") { + test("small") { val input = Util.normalizeNewlines( """ for { a <- List(1); @@ -141,7 +143,7 @@ object ParserTests extends TestSuite{ } yield (1, 2)""" ) val lines = Predef.augmentString(input).lines.toVector - for(i <- 1 until lines.length) { + for (i <- 1 until lines.length) { val prefix = lines.take(i).mkString(Util.newLine) // Only the entire input, which is the last prefix, is complete. // All others are incomplete @@ -149,7 +151,7 @@ object ParserTests extends TestSuite{ else assertIncomplete(prefix) } } - test("medium"){ + test("medium") { val input = Util.normalizeNewlines( """ val ls = for(y <- 1900 to 2000; m <- 1 to 12) yield { if(m == 2) @@ -159,7 +161,7 @@ object ParserTests extends TestSuite{ } """ ) val lines = Predef.augmentString(input).lines.toVector - for(i <- 1 until lines.length) { + for (i <- 1 until lines.length) { val prefix = lines.take(i).mkString(Util.newLine) // Only the entire input, which is the last prefix, is complete. // All others are incomplete @@ -167,7 +169,7 @@ object ParserTests extends TestSuite{ else assertIncomplete(prefix) } } - test("big"){ + test("big") { val input = Util.normalizeNewlines( """import play.core.server._, play.api.routing.sird._, play.api.mvc._ // 0 import scalaj.http._ // 1 @@ -201,7 +203,7 @@ object ParserTests extends TestSuite{ // is a valid, complete input. Every other line *should* be incomplete, // and no prefix in this example should be invalid val completeLines = Set(0, 1, 20, 25) - for(i <- 0 until lines.length) { + for (i <- 0 until lines.length) { val prefix = lines.take(i + 1).mkString(Util.newLine) if (completeLines(i)) assertComplete(prefix) diff --git a/amm/repl/src/test/scala/ammonite/unit/SourceTests.scala b/amm/repl/src/test/scala/ammonite/unit/SourceTests.scala index 60f67d8ae..db5f8c9e9 100644 --- a/amm/repl/src/test/scala/ammonite/unit/SourceTests.scala +++ b/amm/repl/src/test/scala/ammonite/unit/SourceTests.scala @@ -1,6 +1,5 @@ package ammonite.unit - import utest._ import ammonite.compiler.tools.source.load import ammonite.TestUtils @@ -8,13 +7,12 @@ import ammonite.util.Util import ammonite.util.Util.Location import java.io.InputStream -object SourceTests extends TestSuite{ +object SourceTests extends TestSuite { val tests = if (TestUtils.scala2) scala2Tests else scala3Tests - def scala2Tests = Tests{ + def scala2Tests = Tests { def check(loaded: Location, expectedFileName: String, expected: String, slop: Int = 10) = { - val loadedFileName = loaded.fileName assert(loadedFileName == expectedFileName) // The line number from first bytecode of earliest concrete method @@ -27,16 +25,15 @@ object SourceTests extends TestSuite{ assert(nearby.contains(expected)) } - - test("objectInfo"){ - test("thirdPartyJava"){ + test("objectInfo") { + test("thirdPartyJava") { check( load(new javassist.ClassPool()), "ClassPool.java", "public class ClassPool" ) } - test("thirdPartyScala"){ + test("thirdPartyScala") { // Not published for 2.10 // check( // load(shapeless.::), @@ -49,12 +46,12 @@ object SourceTests extends TestSuite{ // "class TokensReader" // ) } - test("stdLibScala"){ - test("direct"){ + test("stdLibScala") { + test("direct") { val is2_13_3_orLower = { val ver = scala.util.Properties.versionNumberString 
!ver.startsWith("2.13.") || - scala.util.Try(ver.stripPrefix("2.13.").toInt).toOption.exists(_ <= 3) + scala.util.Try(ver.stripPrefix("2.13.").toInt).toOption.exists(_ <= 3) } check( load(Nil), @@ -63,7 +60,7 @@ object SourceTests extends TestSuite{ else "val Nil = scala.collection.immutable.Nil" ) } - test("runtimeTyped"){ + test("runtimeTyped") { val empty: Seq[Int] = Seq() val nonEmpty: Seq[Int] = Seq(1) check( @@ -80,8 +77,8 @@ object SourceTests extends TestSuite{ } } - test("objectMemberInfo"){ - test("thirdPartyJava"){ + test("objectMemberInfo") { + test("thirdPartyJava") { val pool = new javassist.ClassPool() check( load(pool.find _), @@ -95,7 +92,7 @@ object SourceTests extends TestSuite{ "public URL find(String classname)" ) } - test("void"){ + test("void") { check( load(Predef.println()), "Predef.scala", @@ -103,7 +100,7 @@ object SourceTests extends TestSuite{ ) } - test("overloaded"){ + test("overloaded") { val pool = new javassist.ClassPool() check( load(pool.makeClass(_: InputStream)), @@ -121,7 +118,7 @@ object SourceTests extends TestSuite{ "public CtClass makeClass(ClassFile classfile, boolean ifNotFrozen)" ) } - test("implementedBySubclass"){ + test("implementedBySubclass") { val opt: Option[Int] = Option(1) check( load(opt.get), diff --git a/amm/runtime/src/main/scala/ammonite/runtime/ClassLoaders.scala b/amm/runtime/src/main/scala/ammonite/runtime/ClassLoaders.scala index e1007094f..f69253c1b 100644 --- a/amm/runtime/src/main/scala/ammonite/runtime/ClassLoaders.scala +++ b/amm/runtime/src/main/scala/ammonite/runtime/ClassLoaders.scala @@ -5,29 +5,26 @@ import java.net.{URL, URLClassLoader, URLConnection, URLStreamHandler} import java.nio.ByteBuffer import java.util.Collections - - import ammonite.util.{Imports, Util} import scala.annotation.tailrec import scala.collection.mutable - - - /** - * Represents a single "frame" of the `sess.save`/`sess.load` stack/tree. - * - * Exposes `imports` and `classpath` as readable but only writable - * in particular ways: `imports` can only be updated via `mergeImports`, - * while `classpath` can only be added to. - */ -class Frame(val classloader: SpecialClassLoader, - val pluginClassloader: SpecialClassLoader, - private[this] var imports0: Imports, - private[this] var classpath0: Seq[java.net.URL], - private[this] var usedEarlierDefinitions0: Seq[String], - private[this] var hooks0: Seq[ammonite.util.Frame.Hook]) extends ammonite.util.Frame{ + * Represents a single "frame" of the `sess.save`/`sess.load` stack/tree. + * + * Exposes `imports` and `classpath` as readable but only writable + * in particular ways: `imports` can only be updated via `mergeImports`, + * while `classpath` can only be added to. 
+ */ +class Frame( + val classloader: SpecialClassLoader, + val pluginClassloader: SpecialClassLoader, + private[this] var imports0: Imports, + private[this] var classpath0: Seq[java.net.URL], + private[this] var usedEarlierDefinitions0: Seq[String], + private[this] var hooks0: Seq[ammonite.util.Frame.Hook] +) extends ammonite.util.Frame { private var frozen0 = false def frozen = frozen0 def freeze(): Unit = { @@ -73,14 +70,14 @@ class Frame(val classloader: SpecialClassLoader, hooks0 = hooks0 :+ hook } } -object Frame{ +object Frame { def createInitial(baseClassLoader: ClassLoader = Thread.currentThread().getContextClassLoader) = { // *Try* to load the JVM source files and make them available as resources, // so that the `source` helper can navigate to the sources within the // Java standard library - val likelyJdkSourceLocation = os.Path(System.getProperty("java.home"))/os.up/"src.zip" + val likelyJdkSourceLocation = os.Path(System.getProperty("java.home")) / os.up / "src.zip" val hash = SpecialClassLoader.initialClasspathSignature(baseClassLoader) def special = new SpecialClassLoader( new ForkClassLoader(baseClassLoader, getClass.getClassLoader), @@ -93,11 +90,13 @@ object Frame{ } } -case class SessionChanged(removedImports: Set[scala.Symbol], - addedImports: Set[scala.Symbol], - removedJars: Set[java.net.URL], - addedJars: Set[java.net.URL]) extends ammonite.repl.api.SessionChanged -object SessionChanged{ +case class SessionChanged( + removedImports: Set[scala.Symbol], + addedImports: Set[scala.Symbol], + removedJars: Set[java.net.URL], + addedJars: Set[java.net.URL] +) extends ammonite.repl.api.SessionChanged +object SessionChanged { def delta(oldFrame: Frame, newFrame: Frame): SessionChanged = { def frameSymbols(f: Frame) = f.imports.value.map(_.toName.backticked).map(Symbol(_)).toSet @@ -110,29 +109,28 @@ object SessionChanged{ } } - -object SpecialClassLoader{ +object SpecialClassLoader { val simpleNameRegex = "[a-zA-Z0-9_]+".r /** - * Stats all jars on the classpath, and loose class-files in the current - * classpath that could conceivably be part of some package, and aggregates - * their names and mtimes as a "signature" of the current classpath - * - * When looking for loose class files, we skip folders whose names are not - * valid java identifiers. Otherwise, the "current classpath" often contains - * the current directory, which in an SBT or Maven project contains hundreds - * or thousands of files which are not on the classpath. Empirically, this - * heuristic improves perf by greatly cutting down on the amount of files we - * need to mtime in many common cases. - */ + * Stats all jars on the classpath, and loose class-files in the current + * classpath that could conceivably be part of some package, and aggregates + * their names and mtimes as a "signature" of the current classpath + * + * When looking for loose class files, we skip folders whose names are not + * valid java identifiers. Otherwise, the "current classpath" often contains + * the current directory, which in an SBT or Maven project contains hundreds + * or thousands of files which are not on the classpath. Empirically, this + * heuristic improves perf by greatly cutting down on the amount of files we + * need to mtime in many common cases. 
+ */ def initialClasspathSignature( - classloader: ClassLoader + classloader: ClassLoader ): Seq[(Either[String, java.net.URL], Long)] = { val allClassloaders = { val all = mutable.Buffer.empty[ClassLoader] var current = classloader - while(current != null && current != ClassLoader.getSystemClassLoader){ + while (current != null && current != ClassLoader.getSystemClassLoader) { all.append(current) current = current.getParent } @@ -152,17 +150,16 @@ object SpecialClassLoader{ } } - val classpathRoots = allClassloaders - .collect{case cl: java.net.URLClassLoader => cl.getURLs} + .collect { case cl: java.net.URLClassLoader => cl.getURLs } .flatten val bootClasspathRoots = sys.props("java.class.path") .split(java.io.File.pathSeparator) .map(java.nio.file.Paths.get(_).toAbsolutePath.toUri.toURL) - val mtimes = (bootClasspathRoots ++ classpathRoots).flatMap{ p => + val mtimes = (bootClasspathRoots ++ classpathRoots).flatMap { p => if (p.getProtocol == "file") { val f = java.nio.file.Paths.get(p.toURI) if (!java.nio.file.Files.exists(f)) Nil @@ -196,32 +193,33 @@ object SpecialClassLoader{ } /** - * Try to load resources from two parents; necessary to get Ammonite's source - * code browsing to work in SBT projects because SBT messes up the context - * classloader https://stackoverflow.com/q/44237791/871202 - */ + * Try to load resources from two parents; necessary to get Ammonite's source + * code browsing to work in SBT projects because SBT messes up the context + * classloader https://stackoverflow.com/q/44237791/871202 + */ class ForkClassLoader(realParent: ClassLoader, fakeParent: ClassLoader) - extends ClassLoader(realParent){ + extends ClassLoader(realParent) { // This delegates to the parent automatically override def findResource(name: String) = fakeParent.getResource(name) } /** - * Classloader used to implement the jar-downloading - * command-evaluating logic in Ammonite. - * - * http://stackoverflow.com/questions/3544614/how-is-the-control-flow-to-findclass-of - */ -class SpecialClassLoader(parent: ClassLoader, - parentSignature: Seq[(Either[String, java.net.URL], Long)], - var specialLocalClasses: Set[String], - urls: URL*) - extends ammonite.util.ReplClassLoader(urls.toArray, parent){ + * Classloader used to implement the jar-downloading + * command-evaluating logic in Ammonite. + * + * http://stackoverflow.com/questions/3544614/how-is-the-control-flow-to-findclass-of + */ +class SpecialClassLoader( + parent: ClassLoader, + parentSignature: Seq[(Either[String, java.net.URL], Long)], + var specialLocalClasses: Set[String], + urls: URL* +) extends ammonite.util.ReplClassLoader(urls.toArray, parent) { /** - * Files which have been compiled, stored so that our special - * classloader can get at them. - */ + * Files which have been compiled, stored so that our special + * classloader can get at them. 
+ */ val newFileDict = mutable.Map.empty[String, Array[Byte]] def addClassFile(name: String, bytes: Array[Byte]) = { val tuple = Left(name) -> bytes.sum.hashCode().toLong @@ -235,17 +233,16 @@ class SpecialClassLoader(parent: ClassLoader, else if (newFileDict.contains(name)) { val bytes = newFileDict(name) defineClass(name, bytes, 0, bytes.length) - }else if (specialLocalClasses(name)) { - + } else if (specialLocalClasses(name)) { val parts = name.split('.') val resource = os.resource / parts.dropRight(1) / (parts.last + ".class") val bytes = try Some(os.read.bytes(resource)) - catch{case e: os.ResourceNotFoundException => None} + catch { case e: os.ResourceNotFoundException => None } - bytes match{ + bytes match { case Some(b) => defineClass(name, b, 0, b.length) case None => super.findClass(name) } @@ -294,16 +291,16 @@ class SpecialClassLoader(parent: ClassLoader, // Include the current working directory in the classpath hash, to make // sure different scripts cached wd.map(_.toString.getBytes).iterator ++ - classpathSignature0.iterator.map { case (path, long) => - val buffer = ByteBuffer.allocate(8) - buffer.putLong(long) - path.toString.getBytes ++ buffer.array() - } + classpathSignature0.iterator.map { case (path, long) => + val buffer = ByteBuffer.allocate(8) + buffer.putLong(long) + path.toString.getBytes ++ buffer.array() + } ) } def allJars: Seq[URL] = { - this.getURLs ++ ( parent match{ + this.getURLs ++ (parent match { case t: SpecialClassLoader => t.allJars case _ => Nil }) @@ -315,18 +312,22 @@ class SpecialClassLoader(parent: ClassLoader, override def findResources(name: String) = getURLFromFileDict(name) match { case Some(u) => Collections.enumeration(Collections.singleton(u)) - case None => super.findResources(name) + case None => super.findResources(name) } private def getURLFromFileDict(name: String) = { val className = name.stripSuffix(".class").replace('/', '.') newFileDict.get(className) map { x => - new URL(null, s"memory:${name}", new URLStreamHandler { - override def openConnection(url: URL): URLConnection = new URLConnection(url) { - override def connect() = () - override def getInputStream = new ByteArrayInputStream(x) + new URL( + null, + s"memory:${name}", + new URLStreamHandler { + override def openConnection(url: URL): URLConnection = new URLConnection(url) { + override def connect() = () + override def getInputStream = new ByteArrayInputStream(x) + } } - }) + ) } } @@ -334,23 +335,21 @@ class SpecialClassLoader(parent: ClassLoader, // FIXME Not tailrec - val newParent = - if (parent == null) - getParent match { - case s: SpecialClassLoader => s.cloneClassLoader() - case p => p - } - else - parent - - val clone = new SpecialClassLoader(newParent, - parentSignature, - specialLocalClasses, - getURLs.toSeq: _*) - clone.newFileDict ++= newFileDict - clone.classpathSignature0 = classpathSignature0 - - clone + val newParent = + if (parent == null) + getParent match { + case s: SpecialClassLoader => s.cloneClassLoader() + case p => p + } + else + parent + + val clone = + new SpecialClassLoader(newParent, parentSignature, specialLocalClasses, getURLs.toSeq: _*) + clone.newFileDict ++= newFileDict + clone.classpathSignature0 = classpathSignature0 + + clone } def inMemoryClasses: Map[String, Array[Byte]] = diff --git a/amm/runtime/src/main/scala/ammonite/runtime/Evaluator.scala b/amm/runtime/src/main/scala/ammonite/runtime/Evaluator.scala index bbd6334f1..f1b7ac319 100644 --- a/amm/runtime/src/main/scala/ammonite/runtime/Evaluator.scala +++ 
b/amm/runtime/src/main/scala/ammonite/runtime/Evaluator.scala @@ -11,67 +11,67 @@ import scala.util.Try /** * Evaluates already-compiled Bytecode. - * - * Deals with all the munging of `Classloader`s, `Class[_]` objects, - * and `Array[Byte]`s representing class files, and reflection necessary - * to take the already-compile Scala bytecode and execute it in our process. + * + * Deals with all the munging of `Classloader`s, `Class[_]` objects, + * and `Array[Byte]`s representing class files, and reflection necessary + * to take the already-compile Scala bytecode and execute it in our process. */ -trait Evaluator{ +trait Evaluator { def loadClass(wrapperName: String, classFiles: ClassFiles): Res[Class[_]] def evalMain(cls: Class[_], contextClassloader: ClassLoader): Any - - def processLine(output: ClassFiles, - newImports: Imports, - usedEarlierDefinitions: Seq[String], - printer: Printer, - indexedWrapperName: Name, - wrapperPath: Seq[Name], - silent: Boolean, - contextClassLoader: ClassLoader): Res[Evaluated] - - def processScriptBlock(cls: Class[_], - newImports: Imports, - usedEarlierDefinitions: Seq[String], - wrapperName: Name, - wrapperPath: Seq[Name], - pkgName: Seq[Name], - contextClassLoader: ClassLoader): Res[Evaluated] + def processLine( + output: ClassFiles, + newImports: Imports, + usedEarlierDefinitions: Seq[String], + printer: Printer, + indexedWrapperName: Name, + wrapperPath: Seq[Name], + silent: Boolean, + contextClassLoader: ClassLoader + ): Res[Evaluated] + + def processScriptBlock( + cls: Class[_], + newImports: Imports, + usedEarlierDefinitions: Seq[String], + wrapperName: Name, + wrapperPath: Seq[Name], + pkgName: Seq[Name], + contextClassLoader: ClassLoader + ): Res[Evaluated] } -object Evaluator{ +object Evaluator { type InvEx = InvocationTargetException type InitEx = ExceptionInInitializerError /** - * We unwrap many of the "common" cases where the user's actual - * exception is wrapped in a bunch of InvocationTargetException - * wrappers, since it's the users exception they probably care about - */ + * We unwrap many of the "common" cases where the user's actual + * exception is wrapped in a bunch of InvocationTargetException + * wrappers, since it's the users exception they probably care about + */ val userCodeExceptionHandler: PartialFunction[Throwable, Res.Failing] = { // Exit - case Ex(_: InvEx, _: InitEx, AmmoniteExit(value)) => Res.Exit(value) + case Ex(_: InvEx, _: InitEx, AmmoniteExit(value)) => Res.Exit(value) // Interrupted during pretty-printing - case Ex(e: ThreadDeath) => interrupted(e) + case Ex(e: ThreadDeath) => interrupted(e) // Interrupted during evaluation - case Ex(_: InvEx, e: ThreadDeath) => interrupted(e) + case Ex(_: InvEx, e: ThreadDeath) => interrupted(e) - case Ex(_: InvEx, _: InitEx, userEx@_*) => Res.Exception(userEx(0), "") - case Ex(_: InvEx, userEx@_*) => Res.Exception(userEx(0), "") - case Ex(userEx@_*) => Res.Exception(userEx(0), "") + case Ex(_: InvEx, _: InitEx, userEx @ _*) => Res.Exception(userEx(0), "") + case Ex(_: InvEx, userEx @ _*) => Res.Exception(userEx(0), "") + case Ex(userEx @ _*) => Res.Exception(userEx(0), "") } - def interrupted(e: Throwable) = { Thread.interrupted() Res.Failure(newLine + "Interrupted! 
(`repl.lastException.printStackTrace` for details)") } - def apply(headFrame: => Frame): Evaluator = new Evaluator{ eval => - - + def apply(headFrame: => Frame): Evaluator = new Evaluator { eval => def loadClass(fullName: String, classFiles: ClassFiles): Res[Class[_]] = { Res[Class[_]]( Try { @@ -81,13 +81,12 @@ object Evaluator{ headFrame.classloader.findClass(fullName) }, - e =>"Failed to load compiled class " + e + e => "Failed to load compiled class " + e ) } - def evalMain(cls: Class[_], contextClassloader: ClassLoader) = - Util.withContextClassloader(contextClassloader){ + Util.withContextClassloader(contextClassloader) { val (method, instance) = try { @@ -108,17 +107,19 @@ object Evaluator{ method.invoke(instance) } - def processLine(classFiles: Util.ClassFiles, - newImports: Imports, - usedEarlierDefinitions: Seq[String], - printer: Printer, - indexedWrapperName: Name, - wrapperPath: Seq[Name], - silent: Boolean, - contextClassLoader: ClassLoader) = { + def processLine( + classFiles: Util.ClassFiles, + newImports: Imports, + usedEarlierDefinitions: Seq[String], + printer: Printer, + indexedWrapperName: Name, + wrapperPath: Seq[Name], + silent: Boolean, + contextClassLoader: ClassLoader + ) = { for { cls <- loadClass("ammonite.$sess." + indexedWrapperName.backticked, classFiles) - _ <- Catching{userCodeExceptionHandler} + _ <- Catching { userCodeExceptionHandler } } yield { headFrame.usedEarlierDefinitions = usedEarlierDefinitions @@ -138,16 +139,17 @@ object Evaluator{ } } - - def processScriptBlock(cls: Class[_], - newImports: Imports, - usedEarlierDefinitions: Seq[String], - wrapperName: Name, - wrapperPath: Seq[Name], - pkgName: Seq[Name], - contextClassLoader: ClassLoader) = { + def processScriptBlock( + cls: Class[_], + newImports: Imports, + usedEarlierDefinitions: Seq[String], + wrapperName: Name, + wrapperPath: Seq[Name], + pkgName: Seq[Name], + contextClassLoader: ClassLoader + ) = { for { - _ <- Catching{userCodeExceptionHandler} + _ <- Catching { userCodeExceptionHandler } } yield { headFrame.usedEarlierDefinitions = usedEarlierDefinitions evalMain(cls, contextClassLoader) @@ -156,13 +158,15 @@ object Evaluator{ } } - def evaluationResult(wrapperName: Seq[Name], - internalWrapperPath: Seq[Name], - imports: Imports) = { + def evaluationResult( + wrapperName: Seq[Name], + internalWrapperPath: Seq[Name], + imports: Imports + ) = { Evaluated( wrapperName, Imports( - for(id <- imports.value) yield { + for (id <- imports.value) yield { val filledPrefix = if (internalWrapperPath.isEmpty) { val filledPrefix = @@ -209,6 +213,4 @@ object Evaluator{ */ def evaluatorRunPrinter(f: => Unit) = f - - } diff --git a/amm/runtime/src/main/scala/ammonite/runtime/ImportHook.scala b/amm/runtime/src/main/scala/ammonite/runtime/ImportHook.scala index 3151b9c73..8de4b1f36 100644 --- a/amm/runtime/src/main/scala/ammonite/runtime/ImportHook.scala +++ b/amm/runtime/src/main/scala/ammonite/runtime/ImportHook.scala @@ -16,28 +16,30 @@ import scala.collection.JavaConverters._ import scala.util.{Failure, Success, Try} /** - * An extensible hook into the Ammonite REPL's import system; allows the end - * user to hook into `import $foo.bar.{baz, qux => qua}` syntax, and in - * response load jars or process source files before the "current" compilation - * unit is run. Can be used to load script files, ivy dependencies, jars, or - * files from the web. 
- */ -trait ImportHook{ + * An extensible hook into the Ammonite REPL's import system; allows the end + * user to hook into `import $foo.bar.{baz, qux => qua}` syntax, and in + * response load jars or process source files before the "current" compilation + * unit is run. Can be used to load script files, ivy dependencies, jars, or + * files from the web. + */ +trait ImportHook { + /** - * Handle a parsed import that this import hook was registered to be interested in - * - * Note that `source` is optional; not every piece of code has a source. Most *user* - * code does, e.g. a repl session is based in their CWD, a script has a path, but - * some things like hardcoded builtin predefs don't - */ - def handle(source: CodeSource, - tree: ImportTree, - interp: ImportHook.InterpreterInterface, - wrapperPath: Seq[Name]) - : Either[String, Seq[ImportHook.Result]] + * Handle a parsed import that this import hook was registered to be interested in + * + * Note that `source` is optional; not every piece of code has a source. Most *user* + * code does, e.g. a repl session is based in their CWD, a script has a path, but + * some things like hardcoded builtin predefs don't + */ + def handle( + source: CodeSource, + tree: ImportTree, + interp: ImportHook.InterpreterInterface, + wrapperPath: Seq[Name] + ): Either[String, Seq[ImportHook.Result]] } -object ImportHook{ +object ImportHook { val defaults = Map[Seq[String], ImportHook]( Seq("url") -> URL, @@ -51,10 +53,10 @@ object ImportHook{ ) /** - * The minimal interface that is exposed to the import hooks from the - * Interpreter. Open for extension, if someone needs more stuff, but by - * default this is what is available. - */ + * The minimal interface that is exposed to the import hooks from the + * Interpreter. Open for extension, if someone needs more stuff, but by + * default this is what is available. + */ trait InterpreterInterface { def loadIvy(coordinates: Dependency*): Either[String, Seq[JFile]] def watch(p: os.Path): Unit @@ -62,19 +64,17 @@ object ImportHook{ } /** - * The result of processing an [[ImportHook]]. Can be either a source-file - * to evaluate, or additional files/folders/jars to put on the classpath - */ + * The result of processing an [[ImportHook]]. 
Can be either a source-file + * to evaluate, or additional files/folders/jars to put on the classpath + */ sealed trait Result - object Result{ - case class Source(code: String, - codeSource: CodeSource, - hookImports: Imports, - exec: Boolean) extends Result + object Result { + case class Source(code: String, codeSource: CodeSource, hookImports: Imports, exec: Boolean) + extends Result case class ClassPath( - origin: Option[Seq[coursierapi.Dependency]], - files: Seq[os.Path], - plugin: Boolean + origin: Option[Seq[coursierapi.Dependency]], + files: Seq[os.Path], + plugin: Boolean ) extends Result case class Repo(repo: coursierapi.Repository) extends Result } @@ -82,66 +82,75 @@ object ImportHook{ object File extends SourceHook(false) object Exec extends SourceHook(true) - def resolveFiles(tree: ImportTree, currentScriptPath: os.Path, extensions: Seq[String]) - : (Seq[(os.RelPath, Option[String])], Seq[os.Path], Seq[os.Path]) = { + def resolveFiles( + tree: ImportTree, + currentScriptPath: os.Path, + extensions: Seq[String] + ): (Seq[(os.RelPath, Option[String])], Seq[os.Path], Seq[os.Path]) = { val relative = if (tree.prefix.isEmpty) os.rel else tree.prefix - .map{case ammonite.util.Util.upPathSegment => os.up; case x => os.rel/x} - .reduce(_/_) + .map { case ammonite.util.Util.upPathSegment => os.up; case x => os.rel / x } + .reduce(_ / _) - val relativeModules = tree.mappings match{ + val relativeModules = tree.mappings match { case None => Seq(relative -> None) - case Some(mappings) => for((k, v) <- mappings) yield relative/k -> v + case Some(mappings) => for ((k, v) <- mappings) yield relative / k -> v } def relToFile(relative: os.RelPath) = { - val base = currentScriptPath/os.up/relative - extensions.find(ext => os.exists(base/os.up/(relative.last + ext))) match{ - case Some(p) => Right(base/os.up/(relative.last + p): os.Path) + val base = currentScriptPath / os.up / relative + extensions.find(ext => os.exists(base / os.up / (relative.last + ext))) match { + case Some(p) => Right(base / os.up / (relative.last + p): os.Path) case None => Left(base) } } val resolved = relativeModules.map(x => relToFile(x._1)) - val missing = resolved.collect{case Left(p) => p} - val files = resolved.collect{case Right(p) => p} + val missing = resolved.collect { case Left(p) => p } + val files = resolved.collect { case Right(p) => p } (relativeModules, files, missing) } class SourceHook(exec: Boolean) extends ImportHook { // import $file.foo.Bar, to import the file `foo/Bar.sc` - def handle(source: CodeSource, - tree: ImportTree, - interp: InterpreterInterface, - wrapperPath: Seq[Name]) = { - - source.path match{ + def handle( + source: CodeSource, + tree: ImportTree, + interp: InterpreterInterface, + wrapperPath: Seq[Name] + ) = { + + source.path match { case None => Left("Cannot resolve $file import in code without source") case Some(currentScriptPath) => - val (relativeModules, files, missing) = resolveFiles( - tree, currentScriptPath, Seq(".sc") + tree, + currentScriptPath, + Seq(".sc") ) files.foreach(interp.watch) - missing.foreach(x => interp.watch(x/os.up/(x.last + ".sc"))) + missing.foreach(x => interp.watch(x / os.up / (x.last + ".sc"))) if (missing.nonEmpty) { Left("Cannot resolve $file import: " + missing.map(s => s"$s.sc").mkString(", ")) } else { Right( - for(((relativeModule, rename), filePath) <- relativeModules.zip(files)) yield { + for (((relativeModule, rename), filePath) <- relativeModules.zip(files)) yield { val (flexiblePkg, wrapper) = Util.pathToPackageWrapper( - 
source.flexiblePkgName, filePath relativeTo currentScriptPath/os.up + source.flexiblePkgName, + filePath relativeTo currentScriptPath / os.up ) val fullPrefix = source.pkgRoot ++ flexiblePkg ++ Seq(wrapper) ++ wrapperPath val importData = Seq(ImportData( - fullPrefix.last, Name(rename.getOrElse(relativeModule.last)), - fullPrefix.dropRight(1), ImportData.TermType + fullPrefix.last, + Name(rename.getOrElse(relativeModule.last)), + fullPrefix.dropRight(1), + ImportData.TermType )) val codeSrc = CodeSource( @@ -167,9 +176,9 @@ object ImportHook{ object Ivy extends BaseIvy(plugin = false) object PluginIvy extends BaseIvy(plugin = true) - class BaseIvy(plugin: Boolean) extends ImportHook{ + class BaseIvy(plugin: Boolean) extends ImportHook { def splitImportTree(tree: ImportTree): Either[String, Seq[String]] = { - tree match{ + tree match { case ImportTree(Seq(part), None, _, _) => Right(Seq(part)) case ImportTree(Nil, Some(mapping), _, _) if mapping.map(_._2).forall(_.isEmpty) => Right(mapping.map(_._1)) @@ -177,8 +186,8 @@ object ImportHook{ } } def resolve( - interp: InterpreterInterface, - signatures: Seq[String] + interp: InterpreterInterface, + signatures: Seq[String] ): Either[String, (Seq[Dependency], Seq[JFile])] = { val splitted = for (signature <- signatures) yield { val (dottyCompat, coords) = @@ -186,7 +195,11 @@ object ImportHook{ else (false, signature) DependencyParser.parse(coords).map { dep => val scalaVersion = - if ((dottyCompat || dep.userParams.get("compat").nonEmpty) && !interp.scalaVersion.startsWith("2.")) + if ( + (dottyCompat || dep.userParams.get( + "compat" + ).nonEmpty) && !interp.scalaVersion.startsWith("2.") + ) // When dotty compatibility is enabled, pull Scala 2.13 dependencies rather than Scala 3 ones. // versionNumberString gives us the right 2.13 version for the current Scala 3 version. 
scala.util.Properties.versionNumberString @@ -196,19 +209,20 @@ object ImportHook{ dep.applyParams(params).toCs } } - val errors = splitted.collect{case Left(error) => error} - val successes = splitted.collect{case Right(v) => v} + val errors = splitted.collect { case Left(error) => error } + val successes = splitted.collect { case Right(v) => v } if (errors.nonEmpty) Left("Invalid $ivy imports: " + errors.map(Util.newLine + " " + _).mkString) else interp.loadIvy(successes: _*).map((successes, _)) } - - def handle(source: CodeSource, - tree: ImportTree, - interp: InterpreterInterface, - wrapperPath: Seq[Name]): Either[String, Seq[Result.ClassPath]] = for{ + def handle( + source: CodeSource, + tree: ImportTree, + interp: InterpreterInterface, + wrapperPath: Seq[Name] + ): Either[String, Seq[Result.ClassPath]] = for { signatures <- splitImportTree(tree) depsResolved <- resolve(interp, signatures) (deps, resolved) = depsResolved @@ -216,11 +230,13 @@ object ImportHook{ } object Classpath extends BaseClasspath(plugin = false) object PluginClasspath extends BaseClasspath(plugin = true) - class BaseClasspath(plugin: Boolean) extends ImportHook{ - def handle(source: CodeSource, - tree: ImportTree, - interp: InterpreterInterface, - wrapperPath: Seq[Name]): Either[String, Seq[Result]] = { + class BaseClasspath(plugin: Boolean) extends ImportHook { + def handle( + source: CodeSource, + tree: ImportTree, + interp: InterpreterInterface, + wrapperPath: Seq[Name] + ): Either[String, Seq[Result]] = { val singleElemOpt = (tree.prefix, tree.mappings) match { // for Scala 2 case (Seq(elem), None) => Some(elem) @@ -229,16 +245,21 @@ object ImportHook{ case _ => None } singleElemOpt match { - case Some(elem) if elem.contains(JFile.pathSeparator) || elem.contains(JFile.separator) || elem.contains("/") || elem.contains("${") => + case Some(elem) + if elem.contains(JFile.pathSeparator) || elem.contains( + JFile.separator + ) || elem.contains("/") || elem.contains("${") => val cwd = source.path.fold(os.pwd)(_ / os.up) val cp = ClassPathUtil.classPath(elem).map(os.Path(_, cwd)) Right(Seq(Result.ClassPath(None, cp, plugin))) case _ => - source.path match{ + source.path match { case None => Left("Cannot resolve $cp import in code without source") case Some(currentScriptPath) => val (relativeModules, files, missing) = resolveFiles( - tree, currentScriptPath, Seq(".jar", "") + tree, + currentScriptPath, + Seq(".jar", "") ) if (missing.nonEmpty) @@ -271,10 +292,12 @@ object ImportHook{ } } - override def handle(source: CodeSource, - tree: ImportTree, - interp: InterpreterInterface, - wrapperPath: Seq[Name]): Either[String, Seq[Result]] = { + override def handle( + source: CodeSource, + tree: ImportTree, + interp: InterpreterInterface, + wrapperPath: Seq[Name] + ): Either[String, Seq[Result]] = { Try(resolveURLs(tree)) match { case Failure(e) => Left(e.getMessage) @@ -282,13 +305,14 @@ object ImportHook{ Right(urlMappings.map { case (uri, rename) => val inputStream = uri.toURL.openStream() - val code = try { - val baos = new ByteArrayOutputStream() - os.Internals.transfer(inputStream, baos) - new String(baos.toByteArray) - } finally{ - inputStream.close() - } + val code = + try { + val baos = new ByteArrayOutputStream() + os.Internals.transfer(inputStream, baos) + new String(baos.toByteArray) + } finally { + inputStream.close() + } val codeSrc = CodeSource( Name(uri.toString), @@ -299,14 +323,16 @@ object ImportHook{ val fullPrefix = source.pkgRoot ++ Seq(Name(uri.toString)) ++ wrapperPath val importData = 
Seq(ImportData( - fullPrefix.last, Name(rename), - fullPrefix.dropRight(1), ImportData.TermType + fullPrefix.last, + Name(rename), + fullPrefix.dropRight(1), + ImportData.TermType )) Result.Source( Util.normalizeNewlines(code), codeSrc, Imports(importData), - exec=false + exec = false ) }) } @@ -314,10 +340,12 @@ object ImportHook{ } object Repo extends ImportHook { - override def handle(source: CodeSource, - tree: ImportTree, - interp: InterpreterInterface, - wrapperPath: Seq[Name]) = { + override def handle( + source: CodeSource, + tree: ImportTree, + interp: InterpreterInterface, + wrapperPath: Seq[Name] + ) = { val urlOpt = (tree.prefix.iterator ++ tree.mappings.iterator.flatMap(_.map(_._1).iterator)) .toStream .headOption diff --git a/amm/runtime/src/main/scala/ammonite/runtime/Storage.scala b/amm/runtime/src/main/scala/ammonite/runtime/Storage.scala index 040efca70..defde7607 100644 --- a/amm/runtime/src/main/scala/ammonite/runtime/Storage.scala +++ b/amm/runtime/src/main/scala/ammonite/runtime/Storage.scala @@ -12,41 +12,42 @@ import scala.util.Try import scala.collection.mutable import scala.reflect.NameTransformer.encode - /** * Trait for the interface of common persistent storage. This handles history * and persistent caches. Right now it is not threadsafe nor does it handle * the mutual exclusion of files between processes. Mutexes should be added * to be able to run multiple Ammonite processes on the same system. */ -trait Storage{ +trait Storage { def loadPredef: Option[(String, os.Path)] val fullHistory: StableRef[History] val ivyCache: StableRef[Storage.IvyMap] def compileCacheSave(path: String, tag: Tag, data: Storage.CompileCache): Unit def compileCacheLoad(path: String, tag: Tag): Option[Storage.CompileCache] - def classFilesListSave(filePathPrefix: os.SubPath, - perBlockMetadata: Seq[ScriptOutput.BlockMetadata], - tag: Tag): Unit + def classFilesListSave( + filePathPrefix: os.SubPath, + perBlockMetadata: Seq[ScriptOutput.BlockMetadata], + tag: Tag + ): Unit def classFilesListLoad(filePathPrefix: os.SubPath, tag: Tag): Option[ScriptOutput] def getSessionId: Long def dirOpt: Option[os.Path] = None } -object Storage{ +object Storage { case class CompileCache(classFiles: Vector[(String, Array[Byte])], imports: Imports) type IvyMap = Map[(String, Seq[Dependency]), Seq[String]] private final case class DependencyLike( - module: DependencyLike.ModuleLike, - version: String, - exclusions: Set[(String, String)], - configuration: String, - `type`: String, - classifier: String, - transitive: Boolean + module: DependencyLike.ModuleLike, + version: String, + exclusions: Set[(String, String)], + configuration: String, + `type`: String, + classifier: String, + transitive: Boolean ) { def dependency: Dependency = { val dep = Dependency.of(module.module, version) @@ -88,16 +89,17 @@ object Storage{ helper.bimap(DependencyLike.apply(_), _.dependency) } implicit def tagRW: upickle.default.ReadWriter[Tag] = upickle.default.macroRW + /** - * Read/write [[Name]]s as unboxed strings, in order to save verbosity - * in the JSON cache files as well as improving performance of - * reading/writing since we read/write [[Name]]s a *lot*. - */ + * Read/write [[Name]]s as unboxed strings, in order to save verbosity + * in the JSON cache files as well as improving performance of + * reading/writing since we read/write [[Name]]s a *lot*. 
+ */ implicit val nameRW: upickle.default.ReadWriter[Name] = upickle.default.readwriter[String].bimap[Name]( name => name.raw, raw => Name(raw) - ) + ) implicit def importTreeRW: upickle.default.ReadWriter[ImportTree] = upickle.default.macroRW implicit def versionedWrapperIdRW: upickle.default.ReadWriter[VersionedWrapperId] = upickle.default.macroRW @@ -114,36 +116,38 @@ object Storage{ upickle.default.readwriter[Seq[ImportData]].bimap[Imports]( imports => imports.value, data => Imports(data) - ) - - private def loadIfTagMatches(loadedTag: Tag, - cacheTag: Tag, - classFilesList: Seq[ScriptOutput.BlockMetadata], - compileCacheLoad: (String, Tag) => Option[CompileCache]) = { + ) + + private def loadIfTagMatches( + loadedTag: Tag, + cacheTag: Tag, + classFilesList: Seq[ScriptOutput.BlockMetadata], + compileCacheLoad: (String, Tag) => Option[CompileCache] + ) = { if (loadedTag != cacheTag) None - else{ + else { val res = - for(blockMeta <- classFilesList) - yield compileCacheLoad(blockMeta.id.wrapperPath, blockMeta.id.tag) + for (blockMeta <- classFilesList) + yield compileCacheLoad(blockMeta.id.wrapperPath, blockMeta.id.tag) if (res.exists(_.isEmpty)) None else Some(ScriptOutput(ScriptOutput.Metadata(classFilesList), res.flatten.map(_.classFiles))) } } - case class InMemory() extends Storage{ + case class InMemory() extends Storage { var predef = "" var sharedPredef = "" def loadPredef = None def getSessionId = 0L var _history = new History(Vector()) - val fullHistory = new StableRef[History]{ + val fullHistory = new StableRef[History] { def apply() = _history def update(h: History): Unit = _history = h } var _ivyCache: IvyMap = Map.empty - val ivyCache = new StableRef[IvyMap]{ + val ivyCache = new StableRef[IvyMap] { def apply() = _ivyCache def update(value: IvyMap): Unit = _ivyCache = value } @@ -162,17 +166,18 @@ object Storage{ } yield data } - def classFilesListSave(filePathPrefix: os.SubPath, - perBlockMetadata: Seq[ScriptOutput.BlockMetadata], - tag: Tag): Unit = { + def classFilesListSave( + filePathPrefix: os.SubPath, + perBlockMetadata: Seq[ScriptOutput.BlockMetadata], + tag: Tag + ): Unit = { classFilesListcache(filePathPrefix.toString) = (tag, perBlockMetadata) } - def classFilesListLoad(filePathPrefix: os.SubPath, - cacheTag: Tag): Option[ScriptOutput] = { + def classFilesListLoad(filePathPrefix: os.SubPath, cacheTag: Tag): Option[ScriptOutput] = { - classFilesListcache.get(filePathPrefix.toString) match{ + classFilesListcache.get(filePathPrefix.toString) match { case None => None case Some((loadedTag, classFilesList)) => loadIfTagMatches(loadedTag, cacheTag, classFilesList, compileCacheLoad) @@ -180,37 +185,37 @@ object Storage{ } } - - - class Folder(val dir: os.Path, isRepl: Boolean = true) extends Storage{ - def predef = if (isRepl) dir/"predef.sc" else dir/"predefScript.sc" + class Folder(val dir: os.Path, isRepl: Boolean = true) extends Storage { + def predef = if (isRepl) dir / "predef.sc" else dir / "predefScript.sc" // Each version puts its cache in a separate folder, to bust caches // on every version bump; otherwise binary-incompatible changes to // ReplAPI/Preprocessor/ammonite-ops will cause scripts to fail after // someone upgrades Ammonite. 
- val cacheDir = dir/"cache"/ammonite.Constants.version - val compileCacheDir = cacheDir/"compile" + val cacheDir = dir / "cache" / ammonite.Constants.version + val compileCacheDir = cacheDir / "compile" val classFilesOrder = "classFilesOrder.json" - val ivyCacheFile = cacheDir/"ivycache.json" - val coursierFetchCacheDir = cacheDir/"coursier-fetch-cache" + val ivyCacheFile = cacheDir / "ivycache.json" + val coursierFetchCacheDir = cacheDir / "coursier-fetch-cache" val metadataFile = "metadata.json" - val sessionFile = dir/"session" + val sessionFile = dir / "session" def getSessionId: Long = { try os.read(sessionFile).toLong - catch{case e: Throwable => - val randomId = math.abs(util.Random.nextLong) - os.write.over(sessionFile, randomId.toString, createFolders = true) - randomId + catch { + case e: Throwable => + val randomId = math.abs(util.Random.nextLong) + os.write.over(sessionFile, randomId.toString, createFolders = true) + randomId } } - val historyFile = dir/"history" - val fullHistory = new StableRef[History]{ + val historyFile = dir / "history" + val fullHistory = new StableRef[History] { def apply(): History = { - try{ + try { new History(upickle.default.read[Vector[String]](os.read(historyFile))) - }catch{case e: Exception => - new History(Vector()) + } catch { + case e: Exception => + new History(Vector()) } } @@ -223,18 +228,19 @@ object Storage{ } } - - def classFilesListSave(filePathPrefix: os.SubPath, - perBlockMetadata: Seq[ScriptOutput.BlockMetadata], - tag: Tag): Unit = { + def classFilesListSave( + filePathPrefix: os.SubPath, + perBlockMetadata: Seq[ScriptOutput.BlockMetadata], + tag: Tag + ): Unit = { val codeCacheDir = - cacheDir/"scriptCaches"/filePathPrefix/tag.code/tag.env/tag.classPathWhitelistHash + cacheDir / "scriptCaches" / filePathPrefix / tag.code / tag.env / tag.classPathWhitelistHash os.makeDir.all(codeCacheDir) try { os.write.over( - codeCacheDir/classFilesOrder, + codeCacheDir / classFilesOrder, upickle.default.stream((tag, perBlockMetadata), indent = 4), createFolders = true ) @@ -249,23 +255,21 @@ object Storage{ val fileData = os.read(path) val parsed = upickle.default.read[T](fileData) Some(parsed) - } - catch{ case e: Throwable => None } + } catch { case e: Throwable => None } } - def classFilesListLoad(filePathPrefix: os.SubPath, - tag: Tag): Option[ScriptOutput] = { + def classFilesListLoad(filePathPrefix: os.SubPath, tag: Tag): Option[ScriptOutput] = { val codeCacheDir = - cacheDir/"scriptCaches"/filePathPrefix/tag.code/tag.env/tag.classPathWhitelistHash + cacheDir / "scriptCaches" / filePathPrefix / tag.code / tag.env / tag.classPathWhitelistHash - if(!os.exists(codeCacheDir)) None + if (!os.exists(codeCacheDir)) None else { val metadataJson = readJson[(Tag, Seq[ScriptOutput.BlockMetadata])]( - codeCacheDir/classFilesOrder + codeCacheDir / classFilesOrder ) - metadataJson match{ + metadataJson match { case Some(metadata) => val (loadedTag, classFilesList) = metadata loadIfTagMatches(loadedTag, tag, classFilesList, compileCacheLoad) @@ -274,34 +278,36 @@ object Storage{ } } - } def compileCacheSave(path: String, tag: Tag, data: CompileCache): Unit = { val tagCacheDir = { - compileCacheDir/path.split('.').map(encode)/tag.code/tag.env/tag.classPathWhitelistHash + compileCacheDir / path.split('.').map( + encode + ) / tag.code / tag.env / tag.classPathWhitelistHash } os.makeDir.all(tagCacheDir) os.write.over( - tagCacheDir/metadataFile, + tagCacheDir / metadataFile, upickle.default.stream((tag, data.imports), indent = 4), - createFolders = true + 
createFolders = true ) - data.classFiles.foreach{ case (name, bytes) => - os.write.over(tagCacheDir/name.split('/'), bytes, createFolders = true) + data.classFiles.foreach { case (name, bytes) => + os.write.over(tagCacheDir / name.split('/'), bytes, createFolders = true) } } def compileCacheLoad(path: String, tag: Tag): Option[CompileCache] = { val tagCacheDir = { - compileCacheDir/path.split('.').map(encode)/tag.code/tag.env/tag.classPathWhitelistHash + compileCacheDir / path.split('.').map( + encode + ) / tag.code / tag.env / tag.classPathWhitelistHash } - if(!os.exists(tagCacheDir)) None - else for{ - (loadedTag, metadata) <- readJson[(Tag, Imports)](tagCacheDir/metadataFile) - + if (!os.exists(tagCacheDir)) None + else for { + (loadedTag, metadata) <- readJson[(Tag, Imports)](tagCacheDir / metadataFile) if tag == loadedTag classFiles <- loadClassFiles(tagCacheDir) } yield { @@ -311,8 +317,8 @@ object Storage{ def loadClassFiles(cacheDir: os.Path): Option[ClassFiles] = { val classFiles = os.walk(cacheDir).filter(os.isFile(_)).toVector - Try{ - val data = classFiles.map{ case file => + Try { + val data = classFiles.map { case file => val className = (file relativeTo cacheDir).toString (className, os.read.bytes(file)) } @@ -320,15 +326,15 @@ object Storage{ }.toOption } - val ivyCache = new StableRef[IvyMap]{ + val ivyCache = new StableRef[IvyMap] { def apply() = { val json = try os.read(ivyCacheFile) - catch{ case e: java.nio.file.NoSuchFileException => "[]" } + catch { case e: java.nio.file.NoSuchFileException => "[]" } val map = try upickle.default.read[IvyMap](json) - catch{ case e: Exception => Map.empty } + catch { case e: Exception => Map.empty } // Check that cached files exist map.filter(_._2.forall(str => Files.exists(Paths.get(str)))).asInstanceOf[IvyMap] } @@ -339,7 +345,7 @@ object Storage{ def loadPredef = { try Some((os.read(predef), predef)) - catch { case e: java.nio.file.NoSuchFileException => Some(("", predef))} + catch { case e: java.nio.file.NoSuchFileException => Some(("", predef)) } } override def dirOpt: Option[os.Path] = Some(dir) diff --git a/amm/runtime/src/main/scala/ammonite/runtime/package.scala b/amm/runtime/src/main/scala/ammonite/runtime/package.scala index 11a45eb56..6b1b20ff1 100644 --- a/amm/runtime/src/main/scala/ammonite/runtime/package.scala +++ b/amm/runtime/src/main/scala/ammonite/runtime/package.scala @@ -1,7 +1,7 @@ package ammonite /** - * What actually lets us compile and execute code in the Ammonite REPL; deals - * with the Scala compiler, preprocessing the strings, JVM classloaders, etc. - */ -package object runtime \ No newline at end of file + * What actually lets us compile and execute code in the Ammonite REPL; deals + * with the Scala compiler, preprocessing the strings, JVM classloaders, etc. 
+ */ +package object runtime diff --git a/amm/src/main/scala/ammonite/AmmoniteMain.scala b/amm/src/main/scala/ammonite/AmmoniteMain.scala index 32b3510d1..7650c3de3 100644 --- a/amm/src/main/scala/ammonite/AmmoniteMain.scala +++ b/amm/src/main/scala/ammonite/AmmoniteMain.scala @@ -16,7 +16,7 @@ import scala.concurrent.duration.Duration // needed to support deprecated Main.main import acyclic.skipped -object AmmoniteMain{ +object AmmoniteMain { /** * The command-line entry point, which does all the argument parsing before @@ -38,17 +38,18 @@ object AmmoniteMain{ * can be unit tested without spinning up lots of separate, expensive * processes */ - def main0(args: List[String], - stdIn: InputStream, - stdOut: OutputStream, - stdErr: OutputStream): Boolean = { + def main0( + args: List[String], + stdIn: InputStream, + stdOut: OutputStream, + stdErr: OutputStream + ): Boolean = { val printErr = new PrintStream(stdErr) val printOut = new PrintStream(stdOut) - val customName = s"Ammonite REPL & Script-Runner, ${ammonite.Constants.version}" val customDoc = "usage: amm [ammonite-options] [script-file [script-options]]" - Config.parser.constructEither(args, customName = customName, customDoc = customDoc) match{ + Config.parser.constructEither(args, customName = customName, customDoc = customDoc) match { case Left(msg) => printErr.println(msg) false @@ -68,13 +69,18 @@ object AmmoniteMain{ Await.result(shutdownFuture, Duration.Inf) printErr.println("BSP server done") true - }else if (cliConfig.core.showVersion.value){ + } else if (cliConfig.core.showVersion.value) { printOut.println(customName) true - }else{ + } else { val runner = new MainRunner( - cliConfig, printOut, printErr, stdIn, stdOut, stdErr, + cliConfig, + printOut, + printErr, + stdIn, + stdOut, + stdErr, os.pwd ) @@ -83,7 +89,7 @@ object AmmoniteMain{ runner.printInfo(msg) } - (cliConfig.core.code, cliConfig.rest.value.toList) match{ + (cliConfig.core.code, cliConfig.rest.value.toList) match { case (Some(code), Nil) => runner.runCode(code) @@ -93,7 +99,6 @@ object AmmoniteMain{ true case (None, head :: rest) if head.startsWith("-") => - val failureMsg = "Unknown Ammonite option: " + head + Util.newLine + "Use --help to list possible options" @@ -109,11 +114,4 @@ object AmmoniteMain{ } } - - - - - - - } diff --git a/amm/src/main/scala/ammonite/Main.scala b/amm/src/main/scala/ammonite/Main.scala index 7461d9c4e..ff84af337 100644 --- a/amm/src/main/scala/ammonite/Main.scala +++ b/amm/src/main/scala/ammonite/Main.scala @@ -23,79 +23,80 @@ import scala.concurrent.duration.Duration // needed to support deprecated Main.main import acyclic.skipped - /** - * Contains the various entry points to the Ammonite REPL. - * - * Configuration of the basic REPL is done by passing in arguments when - * constructing the [[Main]] instance, and the various entrypoints such - * as [[run]] [[runScript]] and so on are methods on that instance. - * - * It is more or less equivalent to the [[ammonite.repl.Repl]] object itself, and has - * a similar set of parameters, but does not have any of the [[ammonite.repl.Repl]]'s - * implementation-related code and provides a more convenient set of - * entry-points that a user can call. - * - * Note that the [[instantiateRepl]] function generates a new [[Repl]] - * every time it is called! - * - * @param predefCode Any additional code you want to run before the REPL session - * starts. 
Can contain multiple blocks separated by `@`s - * @param defaultPredef Do you want to include the "standard" predef imports - * provided by Ammonite? These include tools like `time`, - * `grep`, the `|` or `|?` pipes from ammonite-ops, and - * other helpers. Can be disabled to give a clean - * namespace for you to fill using your own predef. - * @param storageBackend Where will all of Ammonite's persistent data get - * stored? Things like any `predef.sc` file, - * compilation/ivy caches, etc.. Defaults include - * [[Storage.Folder]] and [[Storage.InMemory]], though - * you can create your own. - * @param wd The working directory of the REPL; when it load scripts, where - * the scripts will be considered relative to when assigning them - * packages - * - * @param inputStream Where input to the Repl is coming from, typically System.in, - * but it could come from somewhere else e.g. across the - * network in the case of the SshdRepl - * @param outputStream Primary output of code run using Ammonite - * @param errorStream Error output when things go bad, typically System.err; also - * gets sent miscellaneous info messages that aren't strictly - * part of the REPL or script's output - */ -case class Main(predefCode: String = "", - predefFile: Option[os.Path] = None, - defaultPredef: Boolean = true, - storageBackend: Storage = new Storage.Folder(Defaults.ammoniteHome), - wd: os.Path = os.pwd, - welcomeBanner: Option[String] = Some(Defaults.welcomeBanner), - inputStream: InputStream = System.in, - outputStream: OutputStream = System.out, - errorStream: OutputStream = System.err, - verboseOutput: Boolean = true, - @deprecated("remoteLogging has been removed, do not use this field", - "Ammonite 2.3.0") - remoteLogging: Boolean = true, - colors: Colors = Colors.Default, - replCodeWrapper: CodeWrapper = DefaultCodeWrapper, - scriptCodeWrapper: CodeWrapper = DefaultCodeWrapper, - alreadyLoadedDependencies: Seq[Dependency] = - Defaults.alreadyLoadedDependencies(), - importHooks: Map[Seq[String], ImportHook] = ImportHook.defaults, - compilerBuilder: CompilerBuilder = ammonite.compiler.CompilerBuilder(), - // by-name, so that fastparse isn't loaded when we don't need it - parser: () => Parser = () => ammonite.compiler.Parsers, - classPathWhitelist: Set[Seq[String]] = Set.empty, - warnings: Boolean = false){ + * Contains the various entry points to the Ammonite REPL. + * + * Configuration of the basic REPL is done by passing in arguments when + * constructing the [[Main]] instance, and the various entrypoints such + * as [[run]] [[runScript]] and so on are methods on that instance. + * + * It is more or less equivalent to the [[ammonite.repl.Repl]] object itself, and has + * a similar set of parameters, but does not have any of the [[ammonite.repl.Repl]]'s + * implementation-related code and provides a more convenient set of + * entry-points that a user can call. + * + * Note that the [[instantiateRepl]] function generates a new [[Repl]] + * every time it is called! + * + * @param predefCode Any additional code you want to run before the REPL session + * starts. Can contain multiple blocks separated by `@`s + * @param defaultPredef Do you want to include the "standard" predef imports + * provided by Ammonite? These include tools like `time`, + * `grep`, the `|` or `|?` pipes from ammonite-ops, and + * other helpers. Can be disabled to give a clean + * namespace for you to fill using your own predef. + * @param storageBackend Where will all of Ammonite's persistent data get + * stored? 
Things like any `predef.sc` file, + * compilation/ivy caches, etc.. Defaults include + * [[Storage.Folder]] and [[Storage.InMemory]], though + * you can create your own. + * @param wd The working directory of the REPL; when it load scripts, where + * the scripts will be considered relative to when assigning them + * packages + * + * @param inputStream Where input to the Repl is coming from, typically System.in, + * but it could come from somewhere else e.g. across the + * network in the case of the SshdRepl + * @param outputStream Primary output of code run using Ammonite + * @param errorStream Error output when things go bad, typically System.err; also + * gets sent miscellaneous info messages that aren't strictly + * part of the REPL or script's output + */ +case class Main( + predefCode: String = "", + predefFile: Option[os.Path] = None, + defaultPredef: Boolean = true, + storageBackend: Storage = new Storage.Folder(Defaults.ammoniteHome), + wd: os.Path = os.pwd, + welcomeBanner: Option[String] = Some(Defaults.welcomeBanner), + inputStream: InputStream = System.in, + outputStream: OutputStream = System.out, + errorStream: OutputStream = System.err, + verboseOutput: Boolean = true, + @deprecated("remoteLogging has been removed, do not use this field", "Ammonite 2.3.0") + remoteLogging: Boolean = true, + colors: Colors = Colors.Default, + replCodeWrapper: CodeWrapper = DefaultCodeWrapper, + scriptCodeWrapper: CodeWrapper = DefaultCodeWrapper, + alreadyLoadedDependencies: Seq[Dependency] = + Defaults.alreadyLoadedDependencies(), + importHooks: Map[Seq[String], ImportHook] = ImportHook.defaults, + compilerBuilder: CompilerBuilder = ammonite.compiler.CompilerBuilder(), + // by-name, so that fastparse isn't loaded when we don't need it + parser: () => Parser = () => ammonite.compiler.Parsers, + classPathWhitelist: Set[Seq[String]] = Set.empty, + warnings: Boolean = false +) { - def loadedPredefFile = predefFile match{ + def loadedPredefFile = predefFile match { case Some(path) => try Right(Some(PredefInfo(Name("FilePredef"), os.read(path), false, Some(path)))) - catch{case e: NoSuchFileException => - Left(( - Res.Failure("Unable to load predef file " + path), - Seq((Watchable.Path(path), 0L))) - ) + catch { + case e: NoSuchFileException => + Left(( + Res.Failure("Unable to load predef file " + path), + Seq((Watchable.Path(path), 0L)) + )) } case None => Right(None) } @@ -106,17 +107,16 @@ case class Main(predefCode: String = "", } /** - * Instantiates an ammonite.Repl using the configuration - */ + * Instantiates an ammonite.Repl using the configuration + */ def instantiateRepl(replArgs: IndexedSeq[Bind[_]] = Vector.empty) = { - - loadedPredefFile.right.map{ predefFileInfoOpt => + loadedPredefFile.right.map { predefFileInfoOpt => val augmentedImports = if (defaultPredef) Defaults.replImports ++ Interpreter.predefImports else Imports() - val argString = replArgs.zipWithIndex.map{ case (b, idx) => + val argString = replArgs.zipWithIndex.map { case (b, idx) => s""" val ${b.name} = ammonite .repl @@ -130,14 +130,16 @@ case class Main(predefCode: String = "", }.mkString(newLine) new Repl( - inputStream, outputStream, errorStream, + inputStream, + outputStream, + errorStream, storage = storageBackend, baseImports = augmentedImports, basePredefs = Seq( PredefInfo(Name("ArgsPredef"), argString, false, None) ), customPredefs = predefFileInfoOpt.toSeq ++ Seq( - PredefInfo(Name("CodePredef"), predefCode, false, Some(wd/"(console)")) + PredefInfo(Name("CodePredef"), predefCode, false, Some(wd / 
"(console)")) ), wd = wd, welcomeBanner = welcomeBanner, @@ -205,7 +207,7 @@ case class Main(predefCode: String = "", } ) ) - interp.initializePredef(Seq(), customPredefs, bridges, augmentedImports) match{ + interp.initializePredef(Seq(), customPredefs, bridges, augmentedImports) match { case None => Right(interp) case Some(problems) => Left(problems) } @@ -214,17 +216,16 @@ case class Main(predefCode: String = "", } /** - * Run the REPL, with any additional bindings you wish to provide. - * - * Returns an `Any` representing any value that the user passed into the - * `exit` call when closing the REPL (defaults to `(): Unit`). Also returns - * a sequence of paths that were watched as a result of this REPL run, in - * case you wish to re-start the REPL when any of them change. - */ + * Run the REPL, with any additional bindings you wish to provide. + * + * Returns an `Any` representing any value that the user passed into the + * `exit` call when closing the REPL (defaults to `(): Unit`). Also returns + * a sequence of paths that were watched as a result of this REPL run, in + * case you wish to re-start the REPL when any of them change. + */ def run(replArgs: Bind[_]*): (Res[Any], Seq[(Watchable, Long)]) = { - - instantiateRepl(replArgs.toIndexedSeq) match{ + instantiateRepl(replArgs.toIndexedSeq) match { case Left(missingPredefInfo) => missingPredefInfo case Right(repl) => repl.initializePredef() match { @@ -237,7 +238,7 @@ case class Main(predefCode: String = "", // Warm up the compilation logic in the background, hopefully while the // user is typing their first command, so by the time the command is // submitted it can be processed by a warm compiler - val warmupThread = new Thread(new Runnable{ + val warmupThread = new Thread(new Runnable { def run() = repl.warmup() }) // This thread will terminal eventually on its own, but if the @@ -252,14 +253,12 @@ case class Main(predefCode: String = "", } /** - * Run a Scala script file! takes the path to the file as well as an array - * of `args` and a map of keyword `kwargs` to pass to that file. - */ - def runScript(path: os.Path, - scriptArgs: Seq[String]) - : (Res[Any], Seq[(Watchable, Long)]) = { + * Run a Scala script file! takes the path to the file as well as an array + * of `args` and a map of keyword `kwargs` to pass to that file. 
+ */ + def runScript(path: os.Path, scriptArgs: Seq[String]): (Res[Any], Seq[(Watchable, Long)]) = { - instantiateInterpreter() match{ + instantiateInterpreter() match { case Right(interp) => val result = main.Scripts.runScript(wd, path, interp, scriptArgs) (result, interp.watchedValues.toSeq) @@ -268,10 +267,10 @@ case class Main(predefCode: String = "", } /** - * Run a snippet of code - */ + * Run a snippet of code + */ def runCode(code: String) = { - instantiateInterpreter() match{ + instantiateInterpreter() match { case Right(interp) => val res = interp.processExec(code, 0, () => ()) (res, interp.watchedValues.toSeq) diff --git a/amm/src/main/scala/ammonite/MainRunner.scala b/amm/src/main/scala/ammonite/MainRunner.scala index 086a3055d..c74d8cba8 100644 --- a/amm/src/main/scala/ammonite/MainRunner.scala +++ b/amm/src/main/scala/ammonite/MainRunner.scala @@ -22,39 +22,43 @@ import scala.concurrent.duration.Duration import acyclic.skipped /** - * Bundles together: - * - * - All the code relying on [[cliConfig]] - * - Handling for the common input/output streams and print-streams - * - Logic around the watch-and-rerun flag - */ -class MainRunner(cliConfig: Config, - outprintStream: PrintStream, - errPrintStream: PrintStream, - stdIn: InputStream, - stdOut: OutputStream, - stdErr: OutputStream, - wd: os.Path){ + * Bundles together: + * + * - All the code relying on [[cliConfig]] + * - Handling for the common input/output streams and print-streams + * - Logic around the watch-and-rerun flag + */ +class MainRunner( + cliConfig: Config, + outprintStream: PrintStream, + errPrintStream: PrintStream, + stdIn: InputStream, + stdOut: OutputStream, + stdErr: OutputStream, + wd: os.Path +) { // for trapping exit when the --watch option is on if (cliConfig.core.watch.value && ammonite.util.Util.javaMajorVersion < 17) System.setSecurityManager(TrapExitSecurityManager) val colors = - if(cliConfig.core.color.getOrElse(ammonite.util.Util.isInteractive())) Colors.Default + if (cliConfig.core.color.getOrElse(ammonite.util.Util.isInteractive())) Colors.Default else Colors.BlackWhite def printInfo(s: String) = errPrintStream.println(colors.info()(s)) def printError(s: String) = errPrintStream.println(colors.error()(s)) - @tailrec final def watchLoop[T](isRepl: Boolean, - printing: Boolean, - run: Main => (Res[T], Seq[(Watchable, Long)])): Boolean = { + @tailrec final def watchLoop[T]( + isRepl: Boolean, + printing: Boolean, + run: Main => (Res[T], Seq[(Watchable, Long)]) + ): Boolean = { val (result, watched) = run(initMain(isRepl)) val success = handleWatchRes(result, printing) if (!cliConfig.core.watch.value) success - else{ + else { watchAndWait(watched) watchLoop(isRepl, printing, run) } @@ -102,7 +106,7 @@ class MainRunner(cliConfig: Config, if (printing && value != ()) outprintStream.println(pprint.PPrinter.BlackWhite(value)) true - case Res.Skip => true // do nothing on success, everything's already happened + case Res.Skip => true // do nothing on success, everything's already happened } success } @@ -112,7 +116,7 @@ class MainRunner(cliConfig: Config, new Storage.Folder(cliConfig.core.home, isRepl) { override def loadPredef = None } - }else{ + } else { new Storage.Folder(cliConfig.core.home, isRepl) } @@ -130,7 +134,10 @@ class MainRunner(cliConfig: Config, inputStream = stdIn, outputStream = stdOut, errorStream = stdErr, - welcomeBanner = cliConfig.repl.banner match{case "" => None case s => Some(s)}, + welcomeBanner = cliConfig.repl.banner match { + case "" => None + case s => Some(s) + }, 
verboseOutput = !cliConfig.core.silent.value, remoteLogging = !cliConfig.repl.noRemoteLogging.value, colors = colors, @@ -143,7 +150,8 @@ class MainRunner(cliConfig: Config, compilerBuilder = ammonite.compiler.CompilerBuilder( outputDir = cliConfig.repl.outputDirectory.map(_.toNIO) .orElse { - if (cliConfig.repl.tmpOutputDirectory.value) Some(os.temp.dir(prefix = "ammonite-output").toNIO) + if (cliConfig.repl.tmpOutputDirectory.value) + Some(os.temp.dir(prefix = "ammonite-output").toNIO) else None } ), @@ -152,14 +160,13 @@ class MainRunner(cliConfig: Config, } } -object MainRunner{ +object MainRunner { /** * Polls for updates until either one of the input files changes, * or the enter key is pressed - * */ - def statWatchWait(watched: Seq[(Watchable, Long)], - stdIn: InputStream): Unit = { + */ + def statWatchWait(watched: Seq[(Watchable, Long)], stdIn: InputStream): Unit = { val buffer = new Array[Byte](4 * 1024) def allWatchedUnchanged() = @@ -176,10 +183,10 @@ object MainRunner{ } @tailrec def lookForEnterKey(): Boolean = { if (stdIn.available() == 0) false - else stdIn.read(buffer) match{ + else stdIn.read(buffer) match { case 0 | -1 => false case n => - buffer.indexOf('\n') match{ + buffer.indexOf('\n') match { case -1 => lookForEnterKey() case i => if (i >= n) lookForEnterKey() @@ -190,5 +197,4 @@ object MainRunner{ statWatchWait0() } - -} \ No newline at end of file +} diff --git a/amm/src/main/scala/ammonite/main/Config.scala b/amm/src/main/scala/ammonite/main/Config.scala index 9d43305d4..d5ad26706 100644 --- a/amm/src/main/scala/ammonite/main/Config.scala +++ b/amm/src/main/scala/ammonite/main/Config.scala @@ -3,109 +3,124 @@ package ammonite.main import mainargs.{main, arg, Flag, Leftover, ParserForClass} import ammonite.repl.tools.Util.PathRead @main -case class Config(core: Config.Core, - predef: Config.Predef, - repl: Config.Repl, - rest: Leftover[String]) +case class Config( + core: Config.Core, + predef: Config.Predef, + repl: Config.Repl, + rest: Leftover[String] +) -object Config{ +object Config { @main case class Core( - @arg( - name = "no-default-predef", - doc = "Disable the default predef and run Ammonite with the minimal predef possible") - noDefaultPredef: Flag, - @arg( - short = 's', - doc = """Make ivy logs go silent instead of printing though failures will - still throw exception""") - silent: Flag, - - @arg( - short = 'w', - doc = "Watch and re-run your scripts when they change") - watch: Flag, - @arg(doc = "Run a BSP server against the passed scripts") - bsp: Flag, - @arg( - short = 'c', - doc = "Pass in code to be run immediately in the REPL") - code: Option[String] = None, - @arg( - short = 'h', - doc = "The home directory of the REPL; where it looks for config and caches") - home: os.Path = Defaults.ammoniteHome, - @arg( - name = "predef", - short = 'p', - doc ="""Lets you load your predef from a custom location, rather than the - "default location in your Ammonite home""") - predefFile: Option[os.Path] = None, - @arg( - doc = """Enable or disable colored output; by default colors are enabled + @arg( + name = "no-default-predef", + doc = "Disable the default predef and run Ammonite with the minimal predef possible" + ) + noDefaultPredef: Flag, + @arg( + short = 's', + doc = """Make ivy logs go silent instead of printing though failures will + still throw exception""" + ) + silent: Flag, + @arg( + short = 'w', + doc = "Watch and re-run your scripts when they change" + ) + watch: Flag, + @arg(doc = "Run a BSP server against the passed scripts") + bsp: Flag, + 
@arg( + short = 'c', + doc = "Pass in code to be run immediately in the REPL" + ) + code: Option[String] = None, + @arg( + short = 'h', + doc = "The home directory of the REPL; where it looks for config and caches" + ) + home: os.Path = Defaults.ammoniteHome, + @arg( + name = "predef", + short = 'p', + doc = """Lets you load your predef from a custom location, rather than the + "default location in your Ammonite home""" + ) + predefFile: Option[os.Path] = None, + @arg( + doc = """Enable or disable colored output; by default colors are enabled in both REPL and scripts if the console is interactive, and disabled - otherwise""") - color: Option[Boolean] = None, - @arg( - doc ="""Hide parts of the core of Ammonite and some of its dependencies. By default, + otherwise""" + ) + color: Option[Boolean] = None, + @arg( + doc = """Hide parts of the core of Ammonite and some of its dependencies. By default, the core of Ammonite and all of its dependencies can be seen by users from the - Ammonite session. This option mitigates that via class loader isolation.""") - thin: Flag, - @arg(doc = "Print this message") - help: Flag, - @arg(name = "version", short = 'v', doc = "Show Ammonite's version") - showVersion: Flag, - @arg(name = "no-warn", doc = "Disable compiler warnings") - noWarnings: Flag + Ammonite session. This option mitigates that via class loader isolation.""" + ) + thin: Flag, + @arg(doc = "Print this message") + help: Flag, + @arg(name = "version", short = 'v', doc = "Show Ammonite's version") + showVersion: Flag, + @arg(name = "no-warn", doc = "Disable compiler warnings") + noWarnings: Flag ) implicit val coreParser: ParserForClass[Core] = ParserForClass[Core] @main case class Predef( - @arg( - name = "predef-code", - doc = "Any commands you want to execute at the start of the REPL session") - predefCode: String = "", - - @arg( - name = "no-home-predef", - doc = """Disables the default behavior of loading predef files from your + @arg( + name = "predef-code", + doc = "Any commands you want to execute at the start of the REPL session" + ) + predefCode: String = "", + @arg( + name = "no-home-predef", + doc = """Disables the default behavior of loading predef files from your ~/.ammonite/predef.sc, predefScript.sc, or predefShared.sc. You can - choose an additional predef to use using `--predef""") - noHomePredef: Flag + choose an additional predef to use using `--predef""" + ) + noHomePredef: Flag ) implicit val predefParser: ParserForClass[Predef] = ParserForClass[Predef] @main case class Repl( - @arg( - short = 'b', - doc = "Customize the welcome banner that gets shown when Ammonite starts") - banner: String = Defaults.welcomeBanner, - @arg( - name = "no-remote-logging", - doc = - "(deprecated) Disable remote logging of the number of times a REPL starts and runs commands") - noRemoteLogging: Flag, - @arg( - name = "class-based", - doc = - "Wrap user code in classes rather than singletons, typically for Java serialization "+ - "friendliness.") - classBased: Flag, - @arg( - name = "output-directory", - doc = """Write byte code of the user code in a directory. - The path of that directory can also be accessed later on from the REPL via 'interp.outputDir'.""") - outputDirectory: Option[os.Path] = None, - @arg( - name = "tmp-output-directory", - doc = """Write byte code of the user code in a temporary directory, created by Ammonite. 
+ @arg( + short = 'b', + doc = "Customize the welcome banner that gets shown when Ammonite starts" + ) + banner: String = Defaults.welcomeBanner, + @arg( + name = "no-remote-logging", + doc = + "(deprecated) Disable remote logging of the number of times a REPL starts and runs commands" + ) + noRemoteLogging: Flag, + @arg( + name = "class-based", + doc = + "Wrap user code in classes rather than singletons, typically for Java serialization " + + "friendliness." + ) + classBased: Flag, + @arg( + name = "output-directory", + doc = """Write byte code of the user code in a directory. + The path of that directory can also be accessed later on from the REPL via 'interp.outputDir'.""" + ) + outputDirectory: Option[os.Path] = None, + @arg( + name = "tmp-output-directory", + doc = """Write byte code of the user code in a temporary directory, created by Ammonite. You can access get that directory later on via 'interp.outputDir'. That directory is deleted by Ammonite upon exit. Use --output-directory if you'd like - the output directory not to be erased.""") - tmpOutputDirectory: Flag + the output directory not to be erased.""" + ) + tmpOutputDirectory: Flag ) implicit val replParser: ParserForClass[Repl] = ParserForClass[Repl] diff --git a/amm/src/main/scala/ammonite/main/ProxyFromEnv.scala b/amm/src/main/scala/ammonite/main/ProxyFromEnv.scala index 8bb9d9346..4d3964ede 100644 --- a/amm/src/main/scala/ammonite/main/ProxyFromEnv.scala +++ b/amm/src/main/scala/ammonite/main/ProxyFromEnv.scala @@ -1,36 +1,36 @@ package ammonite.main /** - * Give Ammonite the ability to read (linux) system proxy environment variables - * and convert them into java proxy properties. Which allows Ammonite to work - * through proxy automatically, instead of setting `System.properties` manually. - * - * See issue 460. - * - * Parameter pattern: - * https://docs.oracle.com/javase/7/docs/api/java/net/doc-files/net-properties.html - * - * Created by cuz on 17-5-21. - */ + * Give Ammonite the ability to read (linux) system proxy environment variables + * and convert them into java proxy properties. Which allows Ammonite to work + * through proxy automatically, instead of setting `System.properties` manually. + * + * See issue 460. + * + * Parameter pattern: + * https://docs.oracle.com/javase/7/docs/api/java/net/doc-files/net-properties.html + * + * Created by cuz on 17-5-21. + */ private[ammonite] object ProxyFromEnv { - private lazy val KeyPattern ="""([\w\d]+)_proxy""".r - private lazy val UrlPattern ="""([\w\d]+://)?(.+@)?([\w\d\.\-]+):(\d+)/?""".r + private lazy val KeyPattern = """([\w\d]+)_proxy""".r + private lazy val UrlPattern = """([\w\d]+://)?(.+@)?([\w\d\.\-]+):(\d+)/?""".r /** - * Get current proxy environment variables. - */ + * Get current proxy environment variables. + */ private def getEnvs = sys.env.map { case (k, v) => (k.toLowerCase, v.toLowerCase) } .filterKeys(_.endsWith("proxy")) .toMap /** - * Convert single proxy environment variable to corresponding system proxy properties. - */ + * Convert single proxy environment variable to corresponding system proxy properties. + */ private def envToProps(env: (String, String)): Map[String, String] = env match { case ("no_proxy", noProxySeq) => val converted = noProxySeq.split(""",""").mkString("|") - //https uses the same as http's. Ftp need not to be set here. + // https uses the same as http's. Ftp need not to be set here. 
Map("http.nonProxyHosts" -> converted) case (KeyPattern(proto), UrlPattern(_, cred, host, port)) => @@ -46,11 +46,10 @@ private[ammonite] object ProxyFromEnv { case bad => Map.empty } - /** - * Set system proxy properties from environment variables. - * Existing properties will not be overwritten. - */ + * Set system proxy properties from environment variables. + * Existing properties will not be overwritten. + */ def setPropProxyFromEnv(envs: Map[String, String] = this.getEnvs): Unit = { val sysProps = sys.props val proxyProps = envs.flatMap { env => @@ -61,8 +60,8 @@ private[ammonite] object ProxyFromEnv { } /** - * helper implicit conversion: add isDefined method to String. - */ + * helper implicit conversion: add isDefined method to String. + */ implicit private class StringIsDefined(s: String) { def isDefined: Boolean = s != null && s.length > 0 } diff --git a/amm/src/main/scala/ammonite/main/Scripts.scala b/amm/src/main/scala/ammonite/main/Scripts.scala index 74544569e..e34f4555f 100644 --- a/amm/src/main/scala/ammonite/main/Scripts.scala +++ b/amm/src/main/scala/ammonite/main/Scripts.scala @@ -7,22 +7,26 @@ import ammonite.util.{Name, Res, Util} import fastparse.internal.Util.literalize /** - * Logic around using Ammonite as a script-runner; invoking scripts via the - * macro-generated [[Router]], and pretty-printing any output or error messages - */ + * Logic around using Ammonite as a script-runner; invoking scripts via the + * macro-generated [[Router]], and pretty-printing any output or error messages + */ object Scripts { - def runScript(wd: os.Path, - path: os.Path, - interp: ammonite.interp.Interpreter, - scriptArgs: Seq[String] = Nil) = { + def runScript( + wd: os.Path, + path: os.Path, + interp: ammonite.interp.Interpreter, + scriptArgs: Seq[String] = Nil + ) = { interp.watch(path) val (pkg, wrapper) = Util.pathToPackageWrapper(Seq(), path relativeTo wd) val genRoutesCode = "mainargs.ParserForMethods[$routesOuter.type]($routesOuter)" - for{ - scriptTxt <- try Res.Success(Util.normalizeNewlines(os.read(path))) catch{ - case e: NoSuchFileException => Res.Failure("Script file not found: " + path) - } + for { + scriptTxt <- + try Res.Success(Util.normalizeNewlines(os.read(path))) + catch { + case e: NoSuchFileException => Res.Failure("Script file not found: " + path) + } processed <- interp.processModule( scriptTxt, @@ -35,22 +39,22 @@ object Scripts { // `methodsymbol.annotations` ends up being empty. 
extraCode = Util.normalizeNewlines( s""" - |val $$routesOuter = this - |object $$routes - |extends scala.Function0[mainargs.ParserForMethods[$$routesOuter.type]]{ - | def apply() = $genRoutesCode - |} + |val $$routesOuter = this + |object $$routes + |extends scala.Function0[mainargs.ParserForMethods[$$routesOuter.type]]{ + | def apply() = $genRoutesCode + |} """.stripMargin ), hardcoded = true ) - routeClsName <- processed.blockInfo.lastOption match{ + routeClsName <- processed.blockInfo.lastOption match { case Some(meta) => Res.Success(meta.id.wrapperPath) case None => Res.Skip } - scriptMains = interp.scriptCodeWrapper match{ + scriptMains = interp.scriptCodeWrapper match { case ammonite.compiler.DefaultCodeWrapper => Option( interp @@ -77,37 +81,37 @@ object Scripts { case _ => None } - res <- Util.withContextClassloader(interp.evalClassloader){ + res <- Util.withContextClassloader(interp.evalClassloader) { scriptMains.filter(!_.mains.value.isEmpty) match { // If there are no @main methods, there's nothing to do case None => if (scriptArgs.isEmpty) Res.Success(()) else Res.Failure( "Script " + path.last + - " does not take arguments: " + scriptArgs.map(literalize(_)).mkString(" ") + " does not take arguments: " + scriptArgs.map(literalize(_)).mkString(" ") ) // If there's one @main method, we run it with all args case Some(parser) => - if (scriptArgs.take(1) == Seq("--help")){ + if (scriptArgs.take(1) == Seq("--help")) { Res.Success( - new Object{ + new Object { override def toString() = parser.helpText( totalWidth = 100, docsOnNewLine = false ) } ) - }else mainargs.Invoker.runMains( + } else mainargs.Invoker.runMains( parser.mains, scriptArgs, allowPositional = true, allowRepeats = false - ) match{ + ) match { case Left(earlyError) => Res.Failure(mainargs.Renderer.renderEarlyError(earlyError)) case Right((mainData, result)) => - result match{ + result match { case mainargs.Result.Success(x) => Res.Success(x) case mainargs.Result.Failure.Exception(x: AmmoniteExit) => Res.Success(x.value) case mainargs.Result.Failure.Exception(x) => Res.Exception(x, "") diff --git a/amm/src/main/scala/ammonite/main/TrapExitSecurityManager.scala b/amm/src/main/scala/ammonite/main/TrapExitSecurityManager.scala index b1e4687a1..57831ea4b 100644 --- a/amm/src/main/scala/ammonite/main/TrapExitSecurityManager.scala +++ b/amm/src/main/scala/ammonite/main/TrapExitSecurityManager.scala @@ -15,4 +15,4 @@ object TrapExitSecurityManager extends SecurityManager { override def toString: String = s"script exited with status $status" override def getStackTrace: Array[StackTraceElement] = Array.empty } -} \ No newline at end of file +} diff --git a/amm/src/main/scala/ammonite/main/package.scala b/amm/src/main/scala/ammonite/main/package.scala index c37442cae..638592deb 100644 --- a/amm/src/main/scala/ammonite/main/package.scala +++ b/amm/src/main/scala/ammonite/main/package.scala @@ -1,7 +1,7 @@ package ammonite /** - * Code related to invoking Ammonite from the outside world: default - * configuration, running scripts, printing error messages, etc. - */ -package object main \ No newline at end of file + * Code related to invoking Ammonite from the outside world: default + * configuration, running scripts, printing error messages, etc. 
+ */ +package object main diff --git a/amm/src/test/scala/ammonite/interp/CachingTests.scala b/amm/src/test/scala/ammonite/interp/CachingTests.scala index b6c86bde9..748ecebfe 100644 --- a/amm/src/test/scala/ammonite/interp/CachingTests.scala +++ b/amm/src/test/scala/ammonite/interp/CachingTests.scala @@ -9,14 +9,16 @@ import ammonite.interp.api.IvyConstructor._ import ammonite.util.{Res, Util} import utest._ -object CachingTests extends TestSuite{ - val tests = Tests{ +object CachingTests extends TestSuite { + val tests = Tests { println("ScriptTests") - def runScript(wd: os.Path, - path: os.Path, - interp: ammonite.interp.Interpreter, - scriptArgs: Seq[String] = Nil) = + def runScript( + wd: os.Path, + path: os.Path, + interp: ammonite.interp.Interpreter, + scriptArgs: Seq[String] = Nil + ) = Scripts.runScript(wd, path, interp, scriptArgs) match { case Res.Success(_) => case Res.Skip => @@ -25,17 +27,16 @@ object CachingTests extends TestSuite{ case Res.Exit(_) => throw new Exception("Unexpected exit call from script") } - val scriptPath = os.pwd/"amm"/"src"/"test"/"resources"/"scripts" + val scriptPath = os.pwd / "amm" / "src" / "test" / "resources" / "scripts" - val resourcesPath = os.pwd/"amm"/"src"/"test"/"resources" + val resourcesPath = os.pwd / "amm" / "src" / "test" / "resources" - - val tempDir = os.temp.dir(prefix="ammonite-tester") - test("noAutoIncrementWrapper"){ + val tempDir = os.temp.dir(prefix = "ammonite-tester") + test("noAutoIncrementWrapper") { val storage = Storage.InMemory() val interp = createTestInterp(storage) - runScript(os.pwd, scriptPath/"ThreeBlocks.sc", interp) - try{ + runScript(os.pwd, scriptPath / "ThreeBlocks.sc", interp) + try { Class.forName("cmd0") assert(false) } catch { @@ -43,14 +44,14 @@ object CachingTests extends TestSuite{ case e: Exception => assert(false) } } - test("blocks"){ + test("blocks") { def check(fileName: String, expected: Int) = { val storage = Storage.InMemory() val interp = createTestInterp(storage) val n0 = storage.compileCache.size assert(n0 == 0) - runScript(os.pwd, scriptPath/fileName, interp) + runScript(os.pwd, scriptPath / fileName, interp) val n = storage.compileCache.size assert(n == expected) @@ -61,7 +62,7 @@ object CachingTests extends TestSuite{ test - check("ThreeBlocks.sc", 3) } - test("processModuleCaching"){ + test("processModuleCaching") { def check(script: os.RelPath) = { val storage = new Storage.Folder(tempDir) @@ -70,7 +71,7 @@ object CachingTests extends TestSuite{ predefImports = Interpreter.predefImports ) - runScript(os.pwd, resourcesPath/script, interp1) + runScript(os.pwd, resourcesPath / script, interp1) assert(interp1.compilerManager.compiler != null) val interp2 = createTestInterp( @@ -79,25 +80,26 @@ object CachingTests extends TestSuite{ ) assert(interp2.compilerManager.compiler == null) - runScript(os.pwd, resourcesPath/script, interp2) + runScript(os.pwd, resourcesPath / script, interp2) assert(interp2.compilerManager.compiler == null) } - test("testOne") - check(os.rel/"scriptLevelCaching"/"scriptTwo.sc") - test("testTwo") - check(os.rel/"scriptLevelCaching"/"scriptOne.sc") - test("testThree") - check(os.rel/"scriptLevelCaching"/"QuickSort.sc") - test("testLoadModule") - check(os.rel/"scriptLevelCaching"/"testLoadModule.sc") - test("testFileImport") - check(os.rel/"scriptLevelCaching"/"testFileImport.sc") - test("testIvyImport") - check(os.rel/"scriptLevelCaching"/"ivyCacheTest.sc") - test("testIvyResource"){ - if (!scala2_12) check(os.rel/"scriptLevelCaching"/"ivyCachedResourceTest.sc") + 
test("testOne") - check(os.rel / "scriptLevelCaching" / "scriptTwo.sc") + test("testTwo") - check(os.rel / "scriptLevelCaching" / "scriptOne.sc") + test("testThree") - check(os.rel / "scriptLevelCaching" / "QuickSort.sc") + test("testLoadModule") - check(os.rel / "scriptLevelCaching" / "testLoadModule.sc") + test("testFileImport") - check(os.rel / "scriptLevelCaching" / "testFileImport.sc") + test("testIvyImport") - check(os.rel / "scriptLevelCaching" / "ivyCacheTest.sc") + test("testIvyResource") { + if (!scala2_12) check(os.rel / "scriptLevelCaching" / "ivyCachedResourceTest.sc") } } - test("testRunTimeExceptionForCachedScripts"){ + test("testRunTimeExceptionForCachedScripts") { val storage = new Storage.Folder(tempDir) - val numFile = os.pwd/"amm"/"target"/"test"/"resources"/"scriptLevelCaching"/"num.value" + val numFile = + os.pwd / "amm" / "target" / "test" / "resources" / "scriptLevelCaching" / "num.value" os.remove.all(numFile) os.write(numFile, "1", createFolders = true) val interp1 = createTestInterp( @@ -107,7 +109,7 @@ object CachingTests extends TestSuite{ runScript( os.pwd, - resourcesPath/"scriptLevelCaching"/"runTimeExceptions.sc", + resourcesPath / "scriptLevelCaching" / "runTimeExceptions.sc", interp1 ) @@ -117,17 +119,17 @@ object CachingTests extends TestSuite{ ) val Res.Exception(ex, _) = Scripts.runScript( os.pwd, - resourcesPath/"scriptLevelCaching"/"runTimeExceptions.sc", + resourcesPath / "scriptLevelCaching" / "runTimeExceptions.sc", interp2 ) assert( interp2.compilerManager.compiler == null && - ex.toString == "java.lang.ArithmeticException: / by zero" + ex.toString == "java.lang.ArithmeticException: / by zero" ) } - test("persistence"){ + test("persistence") { val tempDir = os.Path( java.nio.file.Files.createTempDirectory("ammonite-tester-x") @@ -135,27 +137,27 @@ object CachingTests extends TestSuite{ val interp1 = createTestInterp(new Storage.Folder(tempDir)) val interp2 = createTestInterp(new Storage.Folder(tempDir)) - runScript(os.pwd, scriptPath/"OneBlock.sc", interp1) - runScript(os.pwd, scriptPath/"OneBlock.sc", interp2) + runScript(os.pwd, scriptPath / "OneBlock.sc", interp1) + runScript(os.pwd, scriptPath / "OneBlock.sc", interp2) val n1 = interp1.compilationCount val n2 = interp2.compilationCount assert(n1 == 1) // OneBlock.sc assert(n2 == 0) // both should be cached } - test("tags"){ + test("tags") { val storage = Storage.InMemory() val interp = createTestInterp(storage) - runScript(os.pwd, scriptPath/"TagBase.sc", interp) - runScript(os.pwd, scriptPath/"TagPrevCommand.sc", interp) + runScript(os.pwd, scriptPath / "TagBase.sc", interp) + runScript(os.pwd, scriptPath / "TagPrevCommand.sc", interp) implicit val sv = ammonite.interp.api.ScalaVersion(interp.scalaVersion) interp.loadIvy("com.lihaoyi" %% "scalatags" % "0.7.0") - runScript(os.pwd, scriptPath/"TagBase.sc", interp) + runScript(os.pwd, scriptPath / "TagBase.sc", interp) val n = storage.compileCache.size assert(n == 4) // two blocks for each loaded file } - test("compilerInit"){ + test("compilerInit") { val tempDir = os.Path( java.nio.file.Files.createTempDirectory("ammonite-tester-x") ) @@ -169,12 +171,12 @@ object CachingTests extends TestSuite{ predefImports = Interpreter.predefImports ) - runScript(os.pwd, scriptPath/"cachedCompilerInit.sc", interp1) - runScript(os.pwd, scriptPath/"cachedCompilerInit.sc", interp2) + runScript(os.pwd, scriptPath / "cachedCompilerInit.sc", interp1) + runScript(os.pwd, scriptPath / "cachedCompilerInit.sc", interp2) assert(interp2.compilationCount == 0) } - 
test("changeScriptInvalidation"){ + test("changeScriptInvalidation") { // This makes sure that the compile caches are properly utilized, and // flushed, in a variety of circumstances: changes to the number of // blocks in the predef, predefs containing magic imports, and changes @@ -189,9 +191,9 @@ object CachingTests extends TestSuite{ """) val scriptFile = os.temp("""div("<('.'<)", y).render""") - def processAndCheckCompiler(f: ammonite.compiler.iface.Compiler => Boolean) ={ + def processAndCheckCompiler(f: ammonite.compiler.iface.Compiler => Boolean) = { val interp = createTestInterp( - new Storage.Folder(tempDir){ + new Storage.Folder(tempDir) { override val predef = predefFile }, predefImports = Interpreter.predefImports @@ -224,7 +226,7 @@ object CachingTests extends TestSuite{ processAndCheckCompiler(_ != null) processAndCheckCompiler(_ == null) } - test("changeImportedScriptInvalidation"){ + test("changeImportedScriptInvalidation") { val storageFolder = os.temp.dir() @@ -242,8 +244,7 @@ object CachingTests extends TestSuite{ val ident = tmpFile.last.stripSuffix(".sc") (tmpFile, ident) } - test("simple"){ - + test("simple") { val (upstream, upstreamIdent) = createScript( """println("barr") @@ -255,15 +256,14 @@ object CachingTests extends TestSuite{ val (downstream, _) = createScript( s"""import $$file.$upstreamIdent - |println("hello11") - | - |println($upstreamIdent.x) + |println("hello11") + | + |println($upstreamIdent.x) """.stripMargin, - dir = upstream/os.up, + dir = upstream / os.up, name = "downstream" ) - // Upstream, downstream runScript(downstream, 2) runScript(downstream, 0) @@ -289,11 +289,10 @@ object CachingTests extends TestSuite{ s"""import $$file.$upstreamIdent |println("hello") | - |println($upstreamIdent.x) + |println($upstreamIdent.x) """.stripMargin ) - runScript(downstream, 1) runScript(downstream, 0) runScript(downstream, 0) @@ -340,7 +339,7 @@ object CachingTests extends TestSuite{ runScript(downstream, 0) } - test("diamond"){ + test("diamond") { val (upstream, upstreamIdent) = createScript( """println("uppstreamm") |val x = 1 @@ -355,7 +354,7 @@ object CachingTests extends TestSuite{ |val a = $upstreamIdent.x + 1 | """.stripMargin, - dir = upstream/os.up, + dir = upstream / os.up, name = "middleA" ) @@ -365,7 +364,7 @@ object CachingTests extends TestSuite{ |val b = $upstreamIdent.x + 2 | """.stripMargin, - dir = upstream/os.up, + dir = upstream / os.up, name = "middleB" ) @@ -375,11 +374,10 @@ object CachingTests extends TestSuite{ |println("downstreammm") |println($middleAIdent.a + $middleBIdent.b) """.stripMargin, - dir = upstream/os.up, + dir = upstream / os.up, name = "downstream" ) - // upstream + middleA + middleB + downstream // ensure we don't compile `upstream` twice when it's depended upon twice runScript(downstream, 4) @@ -392,7 +390,6 @@ object CachingTests extends TestSuite{ runScript(downstream, 0) runScript(downstream, 0) - os.write.append(middleA, Util.newLine + "val dummy = 1") // Unfortunately, this currently causes `middleB` to get re-processed diff --git a/amm/src/test/scala/ammonite/interp/CompilerSettingsTests.scala b/amm/src/test/scala/ammonite/interp/CompilerSettingsTests.scala index 3acdc353e..4272c53be 100644 --- a/amm/src/test/scala/ammonite/interp/CompilerSettingsTests.scala +++ b/amm/src/test/scala/ammonite/interp/CompilerSettingsTests.scala @@ -13,13 +13,13 @@ object CompilerSettingsTests extends TestSuite { val scriptPath = os.pwd / "amm" / "src" / "test" / "resources" / "scriptCompilerSettings" - test("configureYrangepos"){ 
- + test("configureYrangepos") { + // In this test, the script sets -Yrangepos to true using "configureCompiler", // which is called AFTER the compiler instantiates. As useOffsetPositions // is set eagerly during the compiler instantiation as !Yrangepos, its // value remains "true". - if (scala2_12){ + if (scala2_12) { val storage = Storage.InMemory() val interp = createTestInterp(storage) Scripts.runScript(os.pwd, scriptPath / "configureCompiler.sc", interp) @@ -35,7 +35,7 @@ object CompilerSettingsTests extends TestSuite { } else "Disabled" } - test("preConfigureYrangepos"){ + test("preConfigureYrangepos") { // In this test, the script sets -Yrangepos using "preConfigureCompiler", // which is called BEFORE the compiler instantiates, resulting in // useOffsetPositions initializing as false, as expected diff --git a/amm/src/test/scala/ammonite/interp/YRangeposTests.scala b/amm/src/test/scala/ammonite/interp/YRangeposTests.scala index 4736daa71..e24c8f82f 100644 --- a/amm/src/test/scala/ammonite/interp/YRangeposTests.scala +++ b/amm/src/test/scala/ammonite/interp/YRangeposTests.scala @@ -30,7 +30,7 @@ object YRangeposTests extends TestSuite { val res = Scripts.runScript(os.pwd, scriptFolderPath / "yRangepos.sc", interp) assert(res.isSuccess) } - test("Yrangepos"){ + test("Yrangepos") { if (scala2) simpleTest() else "Disabled in Scala 3" } @@ -40,10 +40,12 @@ object YRangeposTests extends TestSuite { // behaviour, by checking that the line at which the error is found matches // the expected one in the file val expectedErrorMessage = "yRangeposError.sc:9: type mismatch;" - checkErrorMessage(InProcessMainMethodRunner.base / "scriptCompilerSettings" / "yRangeposError.sc", - expectedErrorMessage) + checkErrorMessage( + InProcessMainMethodRunner.base / "scriptCompilerSettings" / "yRangeposError.sc", + expectedErrorMessage + ) } - test("YrangeposError"){ + test("YrangeposError") { if (scala2) errorTest() else "Disabled in Scala 3" } diff --git a/amm/src/test/scala/ammonite/interp/script/AmmoniteBuildServerTests.scala b/amm/src/test/scala/ammonite/interp/script/AmmoniteBuildServerTests.scala index 31fa57ece..0d78b42fe 100644 --- a/amm/src/test/scala/ammonite/interp/script/AmmoniteBuildServerTests.scala +++ b/amm/src/test/scala/ammonite/interp/script/AmmoniteBuildServerTests.scala @@ -20,7 +20,7 @@ object AmmoniteBuildServerTests extends TestSuite { FutureConverters.toScala(f) } - val scriptBase = os.pwd/"amm"/"src"/"test"/"resources"/"bsp" + val scriptBase = os.pwd / "amm" / "src" / "test" / "resources" / "bsp" val wd = os.temp.dir(deleteOnExit = true) @@ -575,8 +575,8 @@ object AmmoniteBuildServerTests extends TestSuite { val otherScriptUri = (wd / otherScriptPath).toNIO.toUri.toASCIIString def semanticDb( - scalacOptionsItem: ScalacOptionsItem, - scriptPath: os.RelPath + scalacOptionsItem: ScalacOptionsItem, + scriptPath: os.RelPath ): TextDocument = { import scala.meta.internal.semanticdb._ @@ -688,7 +688,6 @@ object AmmoniteBuildServerTests extends TestSuite { } yield () } - } class BspScriptRunner(wd: os.Path, script: Seq[os.Path]) { @@ -771,7 +770,7 @@ object AmmoniteBuildServerTests extends TestSuite { val targets = buildTargetsResp.getTargets() assert(targets.asScala.nonEmpty) val scalaTarget = targets.get(0).getData().asInstanceOf[ScalaBuildTarget] - + if (isScala2) { assert(scalaTarget.getScalaVersion().startsWith("2")) assert(scalaTarget.getScalaBinaryVersion().startsWith("2")) diff --git a/amm/src/test/scala/ammonite/interp/script/TestBuildClient.scala 
b/amm/src/test/scala/ammonite/interp/script/TestBuildClient.scala index cd58981f1..6e9a96e29 100644 --- a/amm/src/test/scala/ammonite/interp/script/TestBuildClient.scala +++ b/amm/src/test/scala/ammonite/interp/script/TestBuildClient.scala @@ -20,8 +20,8 @@ class TestBuildClient extends BuildClient { private val didChangeNotifications0 = new ConcurrentLinkedQueue[BuildTargetEvent] def diagnostics( - targetId: BuildTargetIdentifier, - document: TextDocumentIdentifier + targetId: BuildTargetIdentifier, + document: TextDocumentIdentifier ): Option[Seq[BDiagnostic]] = Option(diagnostics0.get((targetId, document))) diff --git a/amm/src/test/scala/ammonite/main/InProcessMainMethodRunner.scala b/amm/src/test/scala/ammonite/main/InProcessMainMethodRunner.scala index 28643e3ab..cada06a82 100644 --- a/amm/src/test/scala/ammonite/main/InProcessMainMethodRunner.scala +++ b/amm/src/test/scala/ammonite/main/InProcessMainMethodRunner.scala @@ -2,38 +2,36 @@ package ammonite.main import java.io.{ByteArrayInputStream, ByteArrayOutputStream} - -object InProcessMainMethodRunner{ - val base = os.pwd/"amm"/"src"/"test"/"resources" +object InProcessMainMethodRunner { + val base = os.pwd / "amm" / "src" / "test" / "resources" } /** - * An in-memory mock subprocess; exposes the same API as a subprocess call: - * stdin, stdout, stderr, and command-line arguments as a Array[String]. But - * runs the computation in memory, which is often a lot faster. - * - * Attempts to capture stdout and stderr from the current thread via - * the `Console.with*` methods. This doesn't always work, e.g. it doesn't - * capture Java `System.*.println` methods, and overall this doesn't guarantee - * JVM-level isolation between tests. But it works well enough for - * our unit tests. - */ -class InProcessMainMethodRunner(p: os.Path, preArgs: List[String], args: Seq[String]){ + * An in-memory mock subprocess; exposes the same API as a subprocess call: + * stdin, stdout, stderr, and command-line arguments as a Array[String]. But + * runs the computation in memory, which is often a lot faster. + * + * Attempts to capture stdout and stderr from the current thread via + * the `Console.with*` methods. This doesn't always work, e.g. it doesn't + * capture Java `System.*.println` methods, and overall this doesn't guarantee + * JVM-level isolation between tests. But it works well enough for + * our unit tests. 
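A self-contained sketch of the capture mechanism (and its limits) described in the scaladoc above: `scala.Console.withOut`/`withErr` reroute Scala-level printing into in-memory buffers, while `System.out` is deliberately left alone.

import java.io.ByteArrayOutputStream

val out = new ByteArrayOutputStream()
val err = new ByteArrayOutputStream()
Console.withOut(out) {
  Console.withErr(err) {
    println("captured") // routed into `out`
    Console.err.println("also captured") // routed into `err`
    System.out.println("not captured") // still reaches the real stdout
  }
}
assert(new String(out.toByteArray).contains("captured"))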
+ */ +class InProcessMainMethodRunner(p: os.Path, preArgs: List[String], args: Seq[String]) { val in = new ByteArrayInputStream(Array.empty[Byte]) val err0 = new ByteArrayOutputStream() val out0 = new ByteArrayOutputStream() val path = p - - val success = Console.withIn(in){ - Console.withErr(err0){ - Console.withOut(out0){ + val success = Console.withIn(in) { + Console.withErr(err0) { + Console.withOut(out0) { ammonite.AmmoniteMain.main0( List("--home", os.temp.dir().toString) ++ - preArgs ++ - Seq(path.toString) ++ - args.toList, + preArgs ++ + Seq(path.toString) ++ + args.toList, in, out0, err0 @@ -42,7 +40,6 @@ class InProcessMainMethodRunner(p: os.Path, preArgs: List[String], args: Seq[Str } } - val out = new String(out0.toByteArray) val err = new String(err0.toByteArray) override def toString = { diff --git a/amm/src/test/scala/ammonite/main/InProcessMainMethodRunnerRawArgs.scala b/amm/src/test/scala/ammonite/main/InProcessMainMethodRunnerRawArgs.scala index 6a76c4e35..270546cb9 100644 --- a/amm/src/test/scala/ammonite/main/InProcessMainMethodRunnerRawArgs.scala +++ b/amm/src/test/scala/ammonite/main/InProcessMainMethodRunnerRawArgs.scala @@ -3,23 +3,23 @@ package ammonite.main import java.io.{ByteArrayInputStream, ByteArrayOutputStream} /** - * Same purpose as [[InProcessMainMethodRunner]] but it accepts the arguments - * as they are. This is somewhat analogous to [[ammonite.Main.main0]] but it - * attempts to capture the output and error streams like - * [[InProcessMainMethodRunner]] without having to append any prefix arguments - * such as the current path. - * - * @see [[InProcessMainMethodRunner]]. - */ -class InProcessMainMethodRunnerRawArgs(args: Seq[String]){ + * Same purpose as [[InProcessMainMethodRunner]] but it accepts the arguments + * as they are. This is somewhat analogous to [[ammonite.Main.main0]] but it + * attempts to capture the output and error streams like + * [[InProcessMainMethodRunner]] without having to append any prefix arguments + * such as the current path. + * + * @see [[InProcessMainMethodRunner]]. 
+ */ +class InProcessMainMethodRunnerRawArgs(args: Seq[String]) { val in = new ByteArrayInputStream(Array.empty[Byte]) val err0 = new ByteArrayOutputStream() val out0 = new ByteArrayOutputStream() - val success = Console.withIn(in){ - Console.withErr(err0){ - Console.withOut(out0){ + val success = Console.withIn(in) { + Console.withErr(err0) { + Console.withOut(out0) { ammonite.AmmoniteMain.main0(args.toList, in, out0, err0) } } diff --git a/amm/src/test/scala/ammonite/main/LineNumberTests.scala b/amm/src/test/scala/ammonite/main/LineNumberTests.scala index e1f401a5f..89d2bf37c 100644 --- a/amm/src/test/scala/ammonite/main/LineNumberTests.scala +++ b/amm/src/test/scala/ammonite/main/LineNumberTests.scala @@ -1,17 +1,16 @@ package ammonite.main - import ammonite.util.Util import utest._ /** - * Make sure that when a script is run with parse errors, compile errors - * or runtime errors, the line numbers in the error message match up with - * the correct line numbers in the original script and not the line numbers - * from the mangled/preprocessed code - */ -object LineNumberTests extends TestSuite{ - val tests = this{ + * Make sure that when a script is run with parse errors, compile errors + * or runtime errors, the line numbers in the error message match up with + * the correct line numbers in the original script and not the line numbers + * from the mangled/preprocessed code + */ +object LineNumberTests extends TestSuite { + val tests = this { def checkErrorMessage(file: os.Path, expected: String): Unit = { val e = new InProcessMainMethodRunner(file, Nil, Nil) @@ -22,7 +21,7 @@ object LineNumberTests extends TestSuite{ val sv = ammonite.compiler.CompilerBuilder.scalaVersion val isScala2 = sv.startsWith("2.") - test("sourcecode"){ + test("sourcecode") { if (isScala2) { val path = InProcessMainMethodRunner.base / "lineNumbers" / "sourceCodeMetadata.sc" checkErrorMessage( @@ -33,81 +32,83 @@ object LineNumberTests extends TestSuite{ ) } } - //All Syntax Error tests currently don't pass on windows as fastparse gives out some 10 - //surrounding chars which are different on windows and linux due to `\n` and `\r\n` - //as `\r\n` counts as 2 so less number of surrounding chars are shown on windows - test("errorTest"){ - if(!Util.windowsPlatform) { + // All Syntax Error tests currently don't pass on windows as fastparse gives out some 10 + // surrounding chars which are different on windows and linux due to `\n` and `\r\n` + // as `\r\n` counts as 2 so less number of surrounding chars are shown on windows + test("errorTest") { + if (!Util.windowsPlatform) { val path = InProcessMainMethodRunner.base / "lineNumbers" / "ErrorLineNumberTest.sc" checkErrorMessage( file = path, expected = Util.normalizeNewlines( if (isScala2) s"""$path:5:24 expected "}" - | printlnqs(unsorted)) - | ^""".stripMargin + | printlnqs(unsorted)) + | ^""".stripMargin else s"""$path - |-- [E040] Syntax Error: :5:23 ---------------------------------------- - |5 | printlnqs(unsorted)) - | | ^ - | | '}' expected, but ')' found""".stripMargin + |-- [E040] Syntax Error: :5:23 ---------------------------------------- + |5 | printlnqs(unsorted)) + | | ^ + | | '}' expected, but ')' found""".stripMargin ) ) } } - test("multipleCompilationUnitErrorTest1"){ - if(!Util.windowsPlatform) { - val path = InProcessMainMethodRunner.base / "lineNumbers"/"MultipleCompilationUnitErrorMsgTest1.sc" - checkErrorMessage( + test("multipleCompilationUnitErrorTest1") { + if (!Util.windowsPlatform) { + val path = + InProcessMainMethodRunner.base / 
"lineNumbers" / "MultipleCompilationUnitErrorMsgTest1.sc" + checkErrorMessage( file = path, expected = Util.normalizeNewlines( if (isScala2) s"""$path:5:1 expected end-of-input - |} - |^""".stripMargin + |} + |^""".stripMargin else s"""$path - |-- [E040] Syntax Error: :3:0 ----------------------------------------- - |3 |} - | |^ - | |eof expected, but '}' found""".stripMargin + |-- [E040] Syntax Error: :3:0 ----------------------------------------- + |3 |} + | |^ + | |eof expected, but '}' found""".stripMargin ) ) } } - - test("multipleCompilationUnitErrorTest2"){ - if(!Util.windowsPlatform) { - val path = InProcessMainMethodRunner.base / "lineNumbers"/"MultipleCompilationUnitErrorMsgTest2.sc" + test("multipleCompilationUnitErrorTest2") { + if (!Util.windowsPlatform) { + val path = + InProcessMainMethodRunner.base / "lineNumbers" / "MultipleCompilationUnitErrorMsgTest2.sc" checkErrorMessage( file = path, expected = Util.normalizeNewlines( if (isScala2) s"""$path:3:1 expected end-of-input - |} - |^""".stripMargin + |} + |^""".stripMargin else s"""$path - |-- [E040] Syntax Error: :3:0 ----------------------------------------- - |3 |} - | |^ - | |eof expected, but '}' found""".stripMargin + |-- [E040] Syntax Error: :3:0 ----------------------------------------- + |3 |} + | |^ + | |eof expected, but '}' found""".stripMargin ) ) } } test("compilationErrorWithCommentsAtTop") { - val path = InProcessMainMethodRunner.base / "lineNumbers"/"compilationErrorWithCommentsAtTop.sc" + val path = + InProcessMainMethodRunner.base / "lineNumbers" / "compilationErrorWithCommentsAtTop.sc" checkErrorMessage( file = path, expected = Util.normalizeNewlines( if (isScala2) s"""$path:11: not found: value quicort - | quicort(unsorted.filter(_ < pivot)):::List(pivot):::""".stripMargin + + | quicort(unsorted.filter(_ < pivot)):::List(pivot):::""".stripMargin + """quicksort(unsorted.filter(_ > pivot))""" else { val firstLine = "quicort(unsorted.filter(_ < pivot)):::List(pivot):::" + @@ -123,14 +124,14 @@ object LineNumberTests extends TestSuite{ } test("compilationErrorInSecondBlock") { - val path = InProcessMainMethodRunner.base / "lineNumbers"/"compilationErrorInSecondBlock.sc" + val path = InProcessMainMethodRunner.base / "lineNumbers" / "compilationErrorInSecondBlock.sc" checkErrorMessage( file = path, expected = Util.normalizeNewlines( if (isScala2) s"""$path:14: not found: value printnl - |val res_0 = printnl("OK") - | ^""".stripMargin + |val res_0 = printnl("OK") + | ^""".stripMargin else { val sp = " " s"""-- [E006] Not Found Error: $path:1:12$sp @@ -143,14 +144,14 @@ object LineNumberTests extends TestSuite{ } test("compilationErrorInFourthBlock") { - val path = InProcessMainMethodRunner.base / "lineNumbers"/"compilationErrorInFourthBlock.sc" + val path = InProcessMainMethodRunner.base / "lineNumbers" / "compilationErrorInFourthBlock.sc" checkErrorMessage( file = path, expected = Util.normalizeNewlines( if (isScala2) s"""$path:30: not found: value prinntl - |val res = prinntl("Ammonite") - | ^""".stripMargin + |val res = prinntl("Ammonite") + | ^""".stripMargin else { val sp = " " s"""-- [E006] Not Found Error: $path:3:10$sp @@ -163,7 +164,7 @@ object LineNumberTests extends TestSuite{ } test("compilationErrorInClass") { - val path = InProcessMainMethodRunner.base / "lineNumbers"/"compilationErrorInClass.sc" + val path = InProcessMainMethodRunner.base / "lineNumbers" / "compilationErrorInClass.sc" checkErrorMessage( file = path, expected = @@ -175,14 +176,15 @@ object LineNumberTests extends TestSuite{ } 
test("CompilationErrorLineNumberTest") { - val path = InProcessMainMethodRunner.base / "lineNumbers" / "CompilationErrorLineNumberTest.sc" + val path = + InProcessMainMethodRunner.base / "lineNumbers" / "CompilationErrorLineNumberTest.sc" checkErrorMessage( file = path, expected = Util.normalizeNewlines( if (isScala2) s"""$path:7: not found: value noSuchObject - | val x = noSuchObject.badFunction - | ^""".stripMargin + | val x = noSuchObject.badFunction + | ^""".stripMargin else { val sp = " " s"""-- [E006] Not Found Error: $path:7:10$sp @@ -196,7 +198,8 @@ object LineNumberTests extends TestSuite{ test("RuntimeCompilationErrorLineNumberTest") - { checkErrorMessage( - file = InProcessMainMethodRunner.base / "lineNumbers"/"RuntimeCompilationErrorLineNumberTest.sc", + file = + InProcessMainMethodRunner.base / "lineNumbers" / "RuntimeCompilationErrorLineNumberTest.sc", expected = s"(RuntimeCompilationErrorLineNumberTest.sc:6)" ) } diff --git a/amm/src/test/scala/ammonite/main/MainTests.scala b/amm/src/test/scala/ammonite/main/MainTests.scala index 55cd6421f..c20912103 100755 --- a/amm/src/test/scala/ammonite/main/MainTests.scala +++ b/amm/src/test/scala/ammonite/main/MainTests.scala @@ -6,10 +6,10 @@ import ammonite.util.Util import utest._ /** - * Tests around Ammonite's CLI handling of main methods, argument parsing, - * and the associated error behavior if the caller messes up. + * Tests around Ammonite's CLI handling of main methods, argument parsing, + * and the associated error behavior if the caller messes up. */ -object MainTests extends TestSuite{ +object MainTests extends TestSuite { def exec(p: String, args: String*) = new InProcessMainMethodRunner(InProcessMainMethodRunner.base / "mains" / p, Nil, args) @@ -22,13 +22,13 @@ object MainTests extends TestSuite{ def tests = Tests { println("Running MainTests") - test("hello"){ + test("hello") { val evaled = exec("Hello.sc") assert(evaled.out.trim == "Hello World") } - test("compilerCrash"){ - if(TestUtils.scala2_11){ + test("compilerCrash") { + if (TestUtils.scala2_11) { val evaled = exec("CompilerCrash.sc") // Make sure we do not accidentally lose the stack trace in the case // where the script fails during compilation before entering the evaluator @@ -38,9 +38,9 @@ object MainTests extends TestSuite{ // Not really related to main methods, but related since most of the main // logic revolves around handling arguments. Make sure this fails properly - test("badAmmoniteFlag"){ + test("badAmmoniteFlag") { val evaled = new InProcessMainMethodRunner( - InProcessMainMethodRunner.base / "mains"/"Hello.sc", + InProcessMainMethodRunner.base / "mains" / "Hello.sc", List("--doesnt-exist"), Nil ) @@ -48,29 +48,29 @@ object MainTests extends TestSuite{ val expected = "Unknown Ammonite option: --doesnt-exist" assert(evaled.err.toString.contains(expected)) } - //make sure scripts with symbols in path names work fine + // make sure scripts with symbols in path names work fine - test("main"){ - test("single"){ + test("main") { + test("single") { val evaled = exec("Main.sc") assert(evaled.success) val out = evaled.out assert(out.contains("Hello! 1")) } - test("multiple"){ - test("positiveNoArgs"){ + test("multiple") { + test("positiveNoArgs") { val evaled = exec("MultiMain.sc", "mainA") assert(evaled.success) val out = evaled.out assert(out == "Hello! 1" + Util.newLine) } - test("positiveArgs"){ + test("positiveArgs") { val evaled = exec("MultiMainDoc.sc", "functionB", "2", "foo") assert(evaled.success) val out = evaled.out assert(out == "Hello! 
foofoo ." + Util.newLine) } - test("specifyMain"){ + test("specifyMain") { val evaled = exec("MultiMain.sc") assert(!evaled.success) val out = evaled.err @@ -79,7 +79,7 @@ object MainTests extends TestSuite{ ) assert(out.contains(expected.trim)) } - test("specifyMainDoc"){ + test("specifyMainDoc") { val evaled = exec("MultiMainDoc.sc") assert(!evaled.success) val out = evaled.err @@ -88,7 +88,7 @@ object MainTests extends TestSuite{ ) assert(out.contains(expected.trim)) } - test("cantFindMain"){ + test("cantFindMain") { val evaled = exec("MultiMainDoc.sc", "doesntExist") assert(!evaled.success) val out = evaled.err @@ -97,7 +97,7 @@ object MainTests extends TestSuite{ ) assert(out.contains(expected.trim)) } - test("emptyArg"){ + test("emptyArg") { if (TestUtils.scala2) { val evaled = exec("ArgList.sc", "") assert(evaled.success) @@ -108,8 +108,8 @@ object MainTests extends TestSuite{ } } - test("args"){ - test("version"){ + test("args") { + test("version") { // Unlike other flags, activating the version flag (if it ever appears // as one of the flags passed in) should show Ammonite's version and // then quickly exit afterwards. @@ -119,53 +119,53 @@ object MainTests extends TestSuite{ val expectedVersionOutput = s"Ammonite REPL & Script-Runner, ${ammonite.Constants.version}" - test("longVersionFlag"){ + test("longVersionFlag") { val evaled = execRawArgs("--version") assert(evaled.success) assert(evaled.out.trim == expectedVersionOutput) } - test("shortVersionFlag"){ + test("shortVersionFlag") { val evaled = execRawArgs("-v") assert(evaled.success) assert(evaled.out.trim == expectedVersionOutput) } - test("longVersionFlagWithOtherArgs"){ + test("longVersionFlagWithOtherArgs") { val evaled = execRawArgs("--version", "-i", "-w") assert(evaled.success) assert(evaled.out.trim == expectedVersionOutput) } - test("shortVersionFlagWithOtherArgs"){ + test("shortVersionFlagWithOtherArgs") { val evaled = execRawArgs("-v", "-i", "-w") assert(evaled.success) assert(evaled.out.trim == expectedVersionOutput) } } - test("full"){ - val evaled = exec("Args.sc", "-i", "3", "-s", "Moo", (os.pwd/"omg"/"moo").toString) + test("full") { + val evaled = exec("Args.sc", "-i", "3", "-s", "Moo", (os.pwd / "omg" / "moo").toString) assert(evaled.success) assert(evaled.out == ("\"Hello! MooMooMoo moo.\"" + Util.newLine)) } - test("default"){ + test("default") { val evaled = exec("Args.sc", "3", "Moo") assert(evaled.success) assert( evaled.out == ("\"Hello! MooMooMoo Ammonite.\"" + Util.newLine) || - // For some reason, on windows CI machines the repo gets clone as lowercase (???) - evaled.out == ("\"Hello! MooMooMoo ammonite.\"" + Util.newLine) + // For some reason, on windows CI machines the repo gets clone as lowercase (???) + evaled.out == ("\"Hello! MooMooMoo ammonite.\"" + Util.newLine) ) } - test("manualPrintln"){ + test("manualPrintln") { val evaled = exec("Args2.sc", "3", "Moo") assert(evaled.success) assert( evaled.out == ("Hello! MooMooMoo Ammonite." + Util.newLine) || - // For some reason, on windows CI machines the repo gets clone as lowercase (???) - evaled.out == ("Hello! MooMooMoo ammonite." + Util.newLine) + // For some reason, on windows CI machines the repo gets clone as lowercase (???) + evaled.out == ("Hello! MooMooMoo ammonite." 
+ Util.newLine) ) } val argsUsageMsg = @@ -173,7 +173,7 @@ object MainTests extends TestSuite{ | -i | --path | -s """.stripMargin - test("tooFew"){ + test("tooFew") { val evaled = exec("Args.sc", "3") assert(!evaled.success) @@ -184,7 +184,7 @@ object MainTests extends TestSuite{ ) )) } - test("badHalfFlag"){ + test("badHalfFlag") { // Make sure if someone passes in a flag without a corresponding RHS // value, it gets treated as a keyword rather than a dumb parameter // and raises an error if it doesn't exist @@ -198,7 +198,7 @@ object MainTests extends TestSuite{ ) )) } - test("goodHalfFlag"){ + test("goodHalfFlag") { // Make sure if someone passes in a flag without a corresponding RHS // value, it gets treated as a keyword rather than a dumb parameter // and raises an error if it doesn't exist @@ -212,20 +212,25 @@ object MainTests extends TestSuite{ ) )) } - test("varargs"){ + test("varargs") { // Make sure varargs are able to slurp up everything, including args // which start with `--`. This allows a user to define a main method // taking `String*`, slurping up all args un-changed, and then passing // them on to their own custom argument parsing code (e.g. scopt) - val evaled = exec("Varargs.sc", + val evaled = exec( + "Varargs.sc", // Normal args get fulfilled - "-i", "31337", "zomg", + "-i", + "31337", + "zomg", // Make sure single-dash -cow has the single-dash preserved - "-cow", "--omg", + "-cow", + "--omg", // Random non-keyword args get passed straight through "bbq", // Keyword args that match an earlier argument get passed through too - "-i", "x", + "-i", + "x", // And so do flags without a paired argument "--i" ) @@ -238,7 +243,7 @@ object MainTests extends TestSuite{ out.contains("List(-cow, --omg, bbq, -i, x, --i)") ) } - test("argsGivenButNoMain"){ + test("argsGivenButNoMain") { val evaled = exec("Hello.sc", "a", "b", "\"") assert(!evaled.success) @@ -246,7 +251,7 @@ object MainTests extends TestSuite{ """Script Hello.sc does not take arguments: "a" "b" "\""""" )) } - test("tooMany"){ + test("tooMany") { val evaled = exec("Args.sc", "3", "4", "5", "6", "7") assert(!evaled.success) @@ -257,7 +262,7 @@ object MainTests extends TestSuite{ ) )) } - test("multipleErrors"){ + test("multipleErrors") { val evaled = exec("Args.sc", "3", "-i", "4", "--unknown", "6") assert(!evaled.success) @@ -270,7 +275,7 @@ object MainTests extends TestSuite{ ) )) } - test("cantParse"){ + test("cantParse") { val evaled = exec("Args.sc", "foo", "moo") assert(!evaled.success) diff --git a/amm/src/test/scala/ammonite/session/ScriptTests.scala b/amm/src/test/scala/ammonite/session/ScriptTests.scala index 22ac84500..b1a0a33d1 100644 --- a/amm/src/test/scala/ammonite/session/ScriptTests.scala +++ b/amm/src/test/scala/ammonite/session/ScriptTests.scala @@ -8,16 +8,16 @@ import ammonite.runtime.Storage import ammonite.util.Res import utest._ -object ScriptTests extends TestSuite{ - val tests = Tests{ +object ScriptTests extends TestSuite { + val tests = Tests { println("ScriptTests") val check = new TestRepl() val printedScriptPath = """pwd/"amm"/"src"/"test"/"resources"/"scripts"""" - test("exec"){ - test("compilationBlocks"){ - test("loadIvy") - retry(3){ // ivy or maven central seems to be flaky =/ =/ =/ + test("exec") { + test("compilationBlocks") { + test("loadIvy") - retry(3) { // ivy or maven central seems to be flaky =/ =/ =/ check.session(s""" @ import os._ @@ -27,8 +27,8 @@ object ScriptTests extends TestSuite{ r: String = "omg" """) - } - test("preserveImports"){ + } + test("preserveImports") { 
check.session(s""" @ import os._ @@ -38,7 +38,7 @@ object ScriptTests extends TestSuite{ r: Left[String, Nothing] = ${Print.Left(value = "\"asd\"")} """) } - test("annotation"){ + test("annotation") { check.session(s""" @ import os._ @@ -48,7 +48,7 @@ object ScriptTests extends TestSuite{ r: Int = 24 """) } - test("syntax"){ + test("syntax") { check.session(s""" @ import os._ @@ -58,7 +58,7 @@ object ScriptTests extends TestSuite{ r: Int = 24 """) } - test("limitImports"){ + test("limitImports") { check.session(s""" @ import os._ @@ -69,25 +69,25 @@ object ScriptTests extends TestSuite{ """) } } - test("failures"){ - test("syntaxError"){ + test("failures") { + test("syntaxError") { val errorChunk = if (check.scala2) """ - | @ val r = res - | error: not found: value res - | val r = res - | ^ - | Compilation Failed - | """.stripMargin + | @ val r = res + | error: not found: value res + | val r = res + | ^ + | Compilation Failed + | """.stripMargin else """ - | @ val r = res - | error: val r = res - | ^^^ - | Not found: res - | Compilation Failed - | """.stripMargin + | @ val r = res + | error: val r = res + | ^^^ + | Not found: res + | Compilation Failed + | """.stripMargin check.session(s""" @ import os._ @@ -97,24 +97,24 @@ object ScriptTests extends TestSuite{ $errorChunk """) } - test("compilationError"){ + test("compilationError") { val errorChunk = if (check.scala2) """ - | @ val r = res - | error: not found: value res - | val r = res - | ^ - | Compilation Failed - | """.stripMargin + | @ val r = res + | error: not found: value res + | val r = res + | ^ + | Compilation Failed + | """.stripMargin else """ - | @ val r = res - | error: val r = res - | ^^^ - | Not found: res - | Compilation Failed - | """.stripMargin + | @ val r = res + | error: val r = res + | ^^^ + | Not found: res + | Compilation Failed + | """.stripMargin check.session(s""" @ import os._ @@ -124,33 +124,32 @@ object ScriptTests extends TestSuite{ $errorChunk """) } - test("nofile"){ + test("nofile") { check.session(s""" @ import os._ @ repl.load.exec($printedScriptPath/"notHere") error: java.nio.file.NoSuchFileException - """ - ) + """) } - test("multiBlockError"){ + test("multiBlockError") { val errorChunk = if (check.scala2) """ - | @ val r2 = res2 - | error: not found: value res2 - | val r2 = res2 - | ^ - | Compilation Failed - | """.stripMargin + | @ val r2 = res2 + | error: not found: value res2 + | val r2 = res2 + | ^ + | Compilation Failed + | """.stripMargin else """ - | @ val r2 = res2 - | error: val r2 = res2 - | ^^^^ - | Not found: res2 - | Compilation Failed - | """.stripMargin + | @ val r2 = res2 + | error: val r2 = res2 + | ^^^^ + | Not found: res2 + | Compilation Failed + | """.stripMargin check.session(s""" @ import os._ @@ -161,7 +160,7 @@ object ScriptTests extends TestSuite{ """) } } - test("nestedScripts"){ + test("nestedScripts") { check.session(s""" @ import os._ @@ -174,8 +173,8 @@ object ScriptTests extends TestSuite{ b: Int = 1 """) } - test("sheBang"){ - test("singleLine"){ + test("sheBang") { + test("singleLine") { check.session(s""" @ import os._ @@ -185,7 +184,7 @@ object ScriptTests extends TestSuite{ r: Int = 42 """) } - test("multiLine"){ + test("multiLine") { check.session( s""" @ import os._ @@ -194,15 +193,16 @@ object ScriptTests extends TestSuite{ @ val r = res r: Int = 42 - """) + """ + ) } } } - test("module"){ - test("compilationBlocks"){ - test("loadIvy"){ + test("module") { + test("compilationBlocks") { + test("loadIvy") { check.session(s""" @ import os._ @@ -212,7 +212,7 @@ 
object ScriptTests extends TestSuite{ r: String = "omg" """) } - test("preserveImports"){ + test("preserveImports") { check.session(s""" @ import os._ @@ -223,7 +223,7 @@ object ScriptTests extends TestSuite{ """) } - test("annotation"){ + test("annotation") { check.session(s""" @ import os._ @@ -234,8 +234,8 @@ object ScriptTests extends TestSuite{ r: Int = 24 """) } - test("syntax"){ - check.session(s""" + test("syntax") { + check.session(s""" @ import os._ @ interp.load.module($printedScriptPath/"BlockSepSyntax.sc") @@ -244,7 +244,7 @@ object ScriptTests extends TestSuite{ r: Int = 24 """) } - test("limitImports"){ + test("limitImports") { check.session(s""" @ import os._ @@ -255,8 +255,8 @@ object ScriptTests extends TestSuite{ """) } } - test("failures"){ - test("syntaxError"){ + test("failures") { + test("syntaxError") { val errorChunk = if (check.scala2) """ @@ -283,7 +283,7 @@ object ScriptTests extends TestSuite{ $errorChunk """) } - test("compilationError"){ + test("compilationError") { val errorChunk = if (check.scala2) """ @@ -309,16 +309,15 @@ object ScriptTests extends TestSuite{ $errorChunk""") } - test("nofile"){ + test("nofile") { check.session(s""" @ import os._ @ repl.load.exec($printedScriptPath/"notHere") error: java.nio.file.NoSuchFileException - """ - ) + """) } - test("scriptWithoutExtension"){ + test("scriptWithoutExtension") { val storage = new Storage.Folder(os.temp.dir(prefix = "ammonite-tester")) val interp2 = createTestInterp( storage, @@ -326,11 +325,11 @@ object ScriptTests extends TestSuite{ ) val Res.Failure(msg) = - Scripts.runScript(os.pwd, os.pwd/"scriptWithoutExtension", interp2) + Scripts.runScript(os.pwd, os.pwd / "scriptWithoutExtension", interp2) assert(msg.contains("Script file not found")) } - test("multiBlockError"){ + test("multiBlockError") { val errorCheck = if (check.scala2) """ @@ -356,7 +355,7 @@ object ScriptTests extends TestSuite{ """ + errorCheck) } } - test("encapsulation"){ + test("encapsulation") { check.session(s""" @ import os._ @@ -364,10 +363,9 @@ object ScriptTests extends TestSuite{ @ interp.load.module($printedScriptPath/"Encapsulation.sc") error: ${check.notFound("asd")} - """ - ) + """) } - test("nestedScripts"){ + test("nestedScripts") { check.session(s""" @ import os._ @@ -380,7 +378,7 @@ object ScriptTests extends TestSuite{ b: Int = 1 """) } - test("noUnWrapping"){ + test("noUnWrapping") { if (check.scala2) check.session(s""" @ import os._ @@ -394,8 +392,8 @@ object ScriptTests extends TestSuite{ """) else "Disabled in Scala 3" // not sure why, in Scala 3, the parser slurps the first '{'… } - test("resolverWithinScript"){ - test("pass"){ + test("resolverWithinScript") { + test("pass") { if (scala2_11) check.session(s""" @ import os._ @@ -404,7 +402,7 @@ object ScriptTests extends TestSuite{ """) } - test("fail"){ + test("fail") { if (scala2_11) check.session(s""" @ import os._ @@ -413,14 +411,14 @@ object ScriptTests extends TestSuite{ """) } } - test("resolverStatic"){ - check.session(s""" + test("resolverStatic") { + check.session(s""" @ import os._ @ interp.load.module($printedScriptPath/"ResolversStatic.sc") """) } - test("loadIvyAdvanced"){ + test("loadIvyAdvanced") { check.session(s""" @ import os._ diff --git a/amm/util/src/main/scala/ammonite/util/Classpath.scala b/amm/util/src/main/scala/ammonite/util/Classpath.scala index 31ef8d239..4f9e97a30 100644 --- a/amm/util/src/main/scala/ammonite/util/Classpath.scala +++ b/amm/util/src/main/scala/ammonite/util/Classpath.scala @@ -5,7 +5,6 @@ import java.net.URL import 
java.nio.file.{Path, Paths} import java.util.zip.{ZipFile, ZipInputStream} - import io.github.retronym.java9rtexport.Export import scala.util.control.NonFatal @@ -18,8 +17,8 @@ import scala.util.control.NonFatal object Classpath { val traceClasspathIssues = sys.props - .get("ammonite.trace-classpath") - .exists(_.toLowerCase == "true") + .get("ammonite.trace-classpath") + .exists(_.toLowerCase == "true") /** * In memory cache of all the jars used in the compiler. This takes up some @@ -27,8 +26,8 @@ object Classpath { * want to do something. */ def classpath( - classLoader: ClassLoader, - rtCacheDir: Option[Path] + classLoader: ClassLoader, + rtCacheDir: Option[Path] ): Vector[URL] = { lazy val actualRTCacheDir = rtCacheDir.filter { dir => // no need to cache if the storage is in tmpdir @@ -39,9 +38,9 @@ object Classpath { var current = classLoader val files = collection.mutable.Buffer.empty[java.net.URL] val seenClassLoaders = collection.mutable.Buffer.empty[ClassLoader] - while(current != null){ + while (current != null) { seenClassLoaders.append(current) - current match{ + current match { case t: java.net.URLClassLoader => files.appendAll(t.getURLs) case _ => @@ -49,7 +48,6 @@ object Classpath { current = current.getParent } - val sunBoot = System.getProperty("sun.boot.class.path") if (sunBoot != null) { files.appendAll( @@ -61,8 +59,10 @@ object Classpath { ) } else { if (seenClassLoaders.contains(ClassLoader.getSystemClassLoader)) { - for (p <- System.getProperty("java.class.path") - .split(File.pathSeparatorChar) if !p.endsWith("sbt-launch.jar")) { + for ( + p <- System.getProperty("java.class.path") + .split(File.pathSeparatorChar) if !p.endsWith("sbt-launch.jar") + ) { val f = new File(p) if (f.exists()) files.append(f.toURI.toURL) @@ -97,7 +97,7 @@ object Classpath { } catch { case NonFatal(e) => traceClasspathProblem( - s"Classpath element '$url' "+ + s"Classpath element '$url' " + s"could not be opened as jar file because of $e" ) false diff --git a/amm/util/src/main/scala/ammonite/util/Frame.scala b/amm/util/src/main/scala/ammonite/util/Frame.scala index 4c528b4d8..c95163c55 100644 --- a/amm/util/src/main/scala/ammonite/util/Frame.scala +++ b/amm/util/src/main/scala/ammonite/util/Frame.scala @@ -14,8 +14,10 @@ trait Frame { } object Frame { + /** A hook that can be called every time JARs are added to the class path */ trait Hook { + /** Called when new JARs are added to the class path */ def addClasspath(additional: Seq[java.net.URL]): Unit } diff --git a/amm/util/src/main/scala/ammonite/util/Imports.scala b/amm/util/src/main/scala/ammonite/util/Imports.scala index cd2d8caf7..f84fb6c95 100644 --- a/amm/util/src/main/scala/ammonite/util/Imports.scala +++ b/amm/util/src/main/scala/ammonite/util/Imports.scala @@ -1,7 +1,7 @@ /** - * Various common, "dumb" data-structures that represent common things that - * are passed around inside Ammonite - */ + * Various common, "dumb" data-structures that represent common things that + * are passed around inside Ammonite + */ package ammonite.util import ammonite.util.Util.{ClassFiles, VersionedWrapperId, newLine} @@ -9,52 +9,53 @@ import ammonite.util.Util.{ClassFiles, VersionedWrapperId, newLine} import scala.collection.mutable /** - * The serialized output of running a script, including both metadata and the classfile binaries - */ + * The serialized output of running a script, including both metadata and the classfile binaries + */ case class ScriptOutput(processed: ScriptOutput.Metadata, classFiles: Seq[ClassFiles]) -object ScriptOutput{ 
+object ScriptOutput { + /** - * Metadata extracted from the compilation of a single block, without the classfiles - * but with enough information to fetch the classfiles form disk and evaluate the - * block without compiling/parsing it - */ - case class BlockMetadata(id: VersionedWrapperId, - leadingSpaces: String, - hookInfo: ImportHookInfo, - finalImports: Imports) + * Metadata extracted from the compilation of a single block, without the classfiles + * but with enough information to fetch the classfiles form disk and evaluate the + * block without compiling/parsing it + */ + case class BlockMetadata( + id: VersionedWrapperId, + leadingSpaces: String, + hookInfo: ImportHookInfo, + finalImports: Imports + ) case class Metadata(blockInfo: Seq[BlockMetadata]) } -case class ImportHookInfo(imports: Imports, - stmts: Seq[String], - trees: Seq[ImportTree]) -case class Evaluated(wrapper: Seq[Name], - imports: Imports) +case class ImportHookInfo(imports: Imports, stmts: Seq[String], trees: Seq[ImportTree]) +case class Evaluated(wrapper: Seq[Name], imports: Imports) /** - * Represents the importing of a single name in the Ammonite REPL, of the - * form - * - * {{{ - * import $prefix.{$fromName => $toName} - * }}} - * - * All imports are reduced to this form; `import $prefix.$name` is results in - * the `fromName` and `toName` being the same, while `import $prefix._` or - * `import $prefix.{foo, bar, baz}` are split into multiple distinct - * [[ImportData]] objects. - * - * Note that imports can be of one of three distinct `ImportType`s: importing - * a type, a term, or both. This lets us properly deal with shadowing correctly - * if we import the type and term of the same name from different places - */ -case class ImportData(fromName: Name, - toName: Name, - prefix: Seq[Name], - importType: ImportData.ImportType) - + * Represents the importing of a single name in the Ammonite REPL, of the + * form + * + * {{{ + * import $prefix.{$fromName => $toName} + * }}} + * + * All imports are reduced to this form; `import $prefix.$name` is results in + * the `fromName` and `toName` being the same, while `import $prefix._` or + * `import $prefix.{foo, bar, baz}` are split into multiple distinct + * [[ImportData]] objects. + * + * Note that imports can be of one of three distinct `ImportType`s: importing + * a type, a term, or both. This lets us properly deal with shadowing correctly + * if we import the type and term of the same name from different places + */ +case class ImportData( + fromName: Name, + toName: Name, + prefix: Seq[Name], + importType: ImportData.ImportType +) -object ImportData{ +object ImportData { sealed case class ImportType(name: String) val Type = ImportType("Type") val Term = ImportType("Term") @@ -82,15 +83,15 @@ object ImportData{ } /** - * Represents the imports that occur before a piece of user code in the - * Ammonite REPL. It's basically a `Seq[ImportData]`, except we really want - * it to be always in a "canonical" form without shadowed/duplicate imports. - * - * Thus we only expose an `apply` method which performs this de-duplication, - * and a `++` operator that combines two sets of imports while performing - * de-duplication. - */ -class Imports private (val value: Seq[ImportData]){ + * Represents the imports that occur before a piece of user code in the + * Ammonite REPL. It's basically a `Seq[ImportData]`, except we really want + * it to be always in a "canonical" form without shadowed/duplicate imports. 
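To make the reduction described in the ImportData scaladoc concrete, here is an illustrative value (not part of the diff) for a renaming import, assuming `TermType` is the flavour used when an import brings in both the term and the type, followed by the de-duplication that `Imports` performs.

// import scala.collection.{mutable => m} reduces to:
val mutableAsM = ImportData(
  fromName = Name("mutable"),
  toName = Name("m"),
  prefix = Seq(Name("scala"), Name("collection")),
  importType = ImportData.TermType
)

// A later import of the same name shadows the earlier one, so only the
// second ImportData survives the canonicalising `apply` below:
val canonical = Imports(
  Seq(mutableAsM),
  Seq(mutableAsM.copy(prefix = Seq(Name("somewhere"), Name("different"))))
)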
+ * + * Thus we only expose an `apply` method which performs this de-duplication, + * and a `++` operator that combines two sets of imports while performing + * de-duplication. + */ +class Imports private (val value: Seq[ImportData]) { def ++(others: Imports) = Imports(this.value, others.value) override def equals(obj: Any): Boolean = obj match { @@ -102,7 +103,7 @@ class Imports private (val value: Seq[ImportData]){ // Group the remaining imports into sliding groups according to their // prefix, while still maintaining their ordering val grouped = mutable.Buffer[mutable.Buffer[ImportData]]() - for(data <- value){ + for (data <- value) { if (grouped.isEmpty) grouped.append(mutable.Buffer(data)) else { val last = grouped.last.last @@ -112,15 +113,15 @@ class Imports private (val value: Seq[ImportData]){ // it to a different name, since you can't import the same thing // twice in a single import statement val startNewImport = - last.prefix != data.prefix || grouped.last.exists(_.fromName == data.fromName) + last.prefix != data.prefix || grouped.last.exists(_.fromName == data.fromName) if (startNewImport) grouped.append(mutable.Buffer(data)) else grouped.last.append(data) } } // Stringify everything - val out = for(group <- grouped) yield { - val printedGroup = for(item <- group) yield{ + val out = for (group <- grouped) yield { + val printedGroup = for (item <- group) yield { if (item.fromName == item.toName) item.fromName.backticked else s"${item.fromName.backticked} => ${item.toName.backticked}" } @@ -132,16 +133,17 @@ class Imports private (val value: Seq[ImportData]){ } } -object Imports{ +object Imports { // This isn't called directly, but we need to define it so uPickle can know // how to read/write imports def unapply(s: Imports): Option[Seq[ImportData]] = Some(s.value) + /** - * Constructs an `Imports` object from one or more loose sequence of imports - * - * Figures out which imports will get stomped over by future imports - * before they get used, and just ignore those. - */ + * Constructs an `Imports` object from one or more loose sequence of imports + * + * Figures out which imports will get stomped over by future imports + * before they get used, and just ignore those. + */ def apply(importss: Seq[ImportData]*): Imports = { // We iterate over the combined reversed imports, keeping track of the // things that will-be-stomped-over-in-the-non-reversed-world in a map. @@ -152,14 +154,14 @@ object Imports{ val stompedTypes = mutable.Set.empty[Name] val stompedTerms = mutable.Set.empty[Name] val out = mutable.Buffer.empty[ImportData] - for(data <- importData.reverseIterator){ - val stomped = data.importType match{ + for (data <- importData.reverseIterator) { + val stomped = data.importType match { case ImportData.Term => Seq(stompedTerms) case ImportData.Type => Seq(stompedTypes) case ImportData.TermType => Seq(stompedTerms, stompedTypes) case _ => ??? 
// there aren't any other cases } - if (!stomped.exists(_(data.toName))){ + if (!stomped.exists(_(data.toName))) { out.append(data) stomped.foreach(_.add(data.toName)) data.prefix.headOption.foreach(stompedTerms.remove) diff --git a/amm/util/src/main/scala/ammonite/util/Model.scala b/amm/util/src/main/scala/ammonite/util/Model.scala index a993a2b5d..f2a49dc34 100644 --- a/amm/util/src/main/scala/ammonite/util/Model.scala +++ b/amm/util/src/main/scala/ammonite/util/Model.scala @@ -1,48 +1,46 @@ /** - * Various common, "dumb" data-structures that represent common things that - * are passed around inside Ammonite - */ + * Various common, "dumb" data-structures that represent common things that + * are passed around inside Ammonite + */ package ammonite.util - import java.io.PrintStream import _root_.org.tpolecat.typename._ import scala.reflect.NameTransformer - /** - * Information about a particular predef file or snippet. [[hardcoded]] - * represents whether or not we cache the snippet forever regardless of - * classpath, which is true for many "internal" predefs which only do - * imports from Ammonite's own packages and don't rely on external code - */ + * Information about a particular predef file or snippet. [[hardcoded]] + * represents whether or not we cache the snippet forever regardless of + * classpath, which is true for many "internal" predefs which only do + * imports from Ammonite's own packages and don't rely on external code + */ case class PredefInfo(name: Name, code: String, hardcoded: Boolean, path: Option[os.Path]) /** - * Exception for reporting script compilation failures - */ + * Exception for reporting script compilation failures + */ class CompilationError(message: String) extends Exception(message) /** - * A unique key for a piece of code that gets run in a particular environment; - * contains the hash of the code and the hash of the environment stored - * separately, so you can e.g. compare the [[env]] hash even if you don't have - * the code available - */ + * A unique key for a piece of code that gets run in a particular environment; + * contains the hash of the code and the hash of the environment stored + * separately, so you can e.g. compare the [[env]] hash even if you don't have + * the code available + */ case class Tag(code: String, env: String, classPathWhitelistHash: String) /** - * Represents a single identifier in Scala source code, e.g. "scala" or - * "println" or "`Hello-World`". - * - * Holds the value "raw", with all special characters intact, e.g. - * "Hello-World". Can be used [[backticked]] e.g. "`Hello-World`", useful for - * embedding in Scala source code, or [[encoded]] e.g. "Hello$minusWorld", - * useful for accessing names as-seen-from the Java/JVM side of things - */ -case class Name(raw: String){ + * Represents a single identifier in Scala source code, e.g. "scala" or + * "println" or "`Hello-World`". + * + * Holds the value "raw", with all special characters intact, e.g. + * "Hello-World". Can be used [[backticked]] e.g. "`Hello-World`", useful for + * embedding in Scala source code, or [[encoded]] e.g. 
"Hello$minusWorld", + * useful for accessing names as-seen-from the Java/JVM side of things + */ +case class Name(raw: String) { assert( NameTransformer.decode(raw) == raw, "Name() must be created with un-encoded text" @@ -53,27 +51,74 @@ case class Name(raw: String){ def backticked = Name.backtickWrap(raw) } -object Name{ +object Name { val alphaKeywords = Set( - "abstract", "case", "catch", "class", "def", "do", "else", - "extends", "false", "finally", "final", "finally", "forSome", - "for", "if", "implicit", "import", "lazy", "match", "new", - "null", "object", "override", "package", "private", "protected", - "return", "sealed", "super", "this", "throw", "trait", "try", - "true", "type", "val", "var", "while", "with", "yield", "_", "macro" + "abstract", + "case", + "catch", + "class", + "def", + "do", + "else", + "extends", + "false", + "finally", + "final", + "finally", + "forSome", + "for", + "if", + "implicit", + "import", + "lazy", + "match", + "new", + "null", + "object", + "override", + "package", + "private", + "protected", + "return", + "sealed", + "super", + "this", + "throw", + "trait", + "try", + "true", + "type", + "val", + "var", + "while", + "with", + "yield", + "_", + "macro" ) val symbolKeywords = Set( - ":", ";", "=>", "=", "<-", "<:", "<%", ">:", "#", "@", "\u21d2", "\u2190" + ":", + ";", + "=>", + "=", + "<-", + "<:", + "<%", + ">:", + "#", + "@", + "\u21d2", + "\u2190" ) val blockCommentStart = "/*" val lineCommentStart = "//" /** - * Custom implementation of ID parsing, instead of using the ScalaParse - * version. This lets us avoid loading FastParse and ScalaParse entirely if - * we're running a cached script, which shaves off 200-300ms of startup time. - */ + * Custom implementation of ID parsing, instead of using the ScalaParse + * version. This lets us avoid loading FastParse and ScalaParse entirely if + * we're running a cached script, which shaves off 200-300ms of startup time. 
+ */ def backtickWrap(s: String) = { if (s.isEmpty) "``" else if (s(0) == '`' && s.last == '`') s @@ -88,30 +133,33 @@ object Name{ chunk.forall(c => c.isLetter || c.isDigit || c == '$') || ( chunk.forall(validOperator) && - // operators can only come last - index == chunks.length - 1 && - // but cannot be preceded by only a _ - !(chunks.lift(index - 1).exists(_ == "") && index - 1== 0)) + // operators can only come last + index == chunks.length - 1 && + // but cannot be preceded by only a _ + !(chunks.lift(index - 1).exists(_ == "") && index - 1 == 0) + ) } val firstLetterValid = s(0).isLetter || s(0) == '_' || s(0) == '$' || validOperator(s(0)) val valid = validChunks && - firstLetterValid && - !alphaKeywords.contains(s) && - !symbolKeywords.contains(s) && - !s.contains(blockCommentStart) && - !s.contains(lineCommentStart) + firstLetterValid && + !alphaKeywords.contains(s) && + !symbolKeywords.contains(s) && + !s.contains(blockCommentStart) && + !s.contains(lineCommentStart) if (valid) s else '`' + s + '`' } } } + /** * Encapsulates a read-write cell that can be passed around */ -trait StableRef[T]{ +trait StableRef[T] { + /** * Get the current value of the this [[StableRef]] at this instant in time */ @@ -123,7 +171,8 @@ trait StableRef[T]{ def update(t: T): Unit } -trait Ref[T] extends StableRef[T]{ +trait Ref[T] extends StableRef[T] { + /** * Return a function that can be used to get the value of this [[Ref]] * at any point in time @@ -137,10 +186,10 @@ trait Ref[T] extends StableRef[T]{ def bind(t: => T): Unit } -object Ref{ +object Ref { implicit def refer[T](t: T): Ref[T] = Ref(t) - def live[T](value0: () => T) = new Ref[T]{ + def live[T](value0: () => T) = new Ref[T] { var value: () => T = value0 def live() = value def apply() = value() @@ -154,7 +203,7 @@ object Ref{ /** * Nice pattern matching for chained exceptions */ -object Ex{ +object Ex { def unapplySeq(t: Throwable): Option[Seq[Throwable]] = { def rec(t: Throwable): List[Throwable] = { t match { @@ -166,10 +215,7 @@ object Ex{ } } - - - -trait CodeColors{ +trait CodeColors { def `type`: fansi.Attrs def literal: fansi.Attrs def comment: fansi.Attrs @@ -190,18 +236,20 @@ trait CodeColors{ * @param selected The color of text selected in the line-editor * @param error The color used to print error messages of all kinds */ -case class Colors(prompt: Ref[fansi.Attrs], - ident: Ref[fansi.Attrs], - `type`: Ref[fansi.Attrs], - literal: Ref[fansi.Attrs], - prefix: Ref[fansi.Attrs], - comment: Ref[fansi.Attrs], - keyword: Ref[fansi.Attrs], - selected: Ref[fansi.Attrs], - error: Ref[fansi.Attrs], - warning: Ref[fansi.Attrs], - info: Ref[fansi.Attrs]) -object Colors{ +case class Colors( + prompt: Ref[fansi.Attrs], + ident: Ref[fansi.Attrs], + `type`: Ref[fansi.Attrs], + literal: Ref[fansi.Attrs], + prefix: Ref[fansi.Attrs], + comment: Ref[fansi.Attrs], + keyword: Ref[fansi.Attrs], + selected: Ref[fansi.Attrs], + error: Ref[fansi.Attrs], + warning: Ref[fansi.Attrs], + info: Ref[fansi.Attrs] +) +object Colors { def Default = Colors( fansi.Color.Magenta, @@ -217,9 +265,17 @@ object Colors{ fansi.Color.Blue ) def BlackWhite = Colors( - fansi.Attrs.Empty, fansi.Attrs.Empty, fansi.Attrs.Empty, fansi.Attrs.Empty, - fansi.Attrs.Empty, fansi.Attrs.Empty, fansi.Attrs.Empty, fansi.Attrs.Empty, - fansi.Attrs.Empty, fansi.Attrs.Empty, fansi.Attrs.Empty + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + fansi.Attrs.Empty, + 
fansi.Attrs.Empty, + fansi.Attrs.Empty ) } @@ -228,38 +284,45 @@ object Colors{ * REPL so it can re-create the bindings inside the REPL's scope */ case class Bind[T](name: String, value: T)(implicit val typeName: TypeName[T]) -object Bind{ - implicit def ammoniteReplArrowBinder[T](t: (String, T))(implicit typeName: TypeName[T]): Bind[T] = { +object Bind { + implicit def ammoniteReplArrowBinder[T](t: (String, T))(implicit + typeName: TypeName[T] + ): Bind[T] = { Bind(t._1, t._2)(typeName) } } + /** - * Encapsulates the ways the Ammonite REPL prints things. Does not print - * a trailing newline by default; you have to add one yourself. - * - * @param outStream Direct access to print to stdout - * @param errStream Direct access to print to stderr - * @param resultStream Direct access to print the result of the entered code - * @param warning How you want it to print a compile warning - * @param error How you want it to print a compile error - * @param info How you want to print compile info logging. *Not* the same - * as `out`, which is used to print runtime output. - */ -case class Printer(outStream: PrintStream, - errStream: PrintStream, - resultStream: PrintStream, - warning: String => Unit, - error: String => Unit, - info: String => Unit) + * Encapsulates the ways the Ammonite REPL prints things. Does not print + * a trailing newline by default; you have to add one yourself. + * + * @param outStream Direct access to print to stdout + * @param errStream Direct access to print to stderr + * @param resultStream Direct access to print the result of the entered code + * @param warning How you want it to print a compile warning + * @param error How you want it to print a compile error + * @param info How you want to print compile info logging. *Not* the same + * as `out`, which is used to print runtime output. + */ +case class Printer( + outStream: PrintStream, + errStream: PrintStream, + resultStream: PrintStream, + warning: String => Unit, + error: String => Unit, + info: String => Unit +) -case class ImportTree(prefix: Seq[String], - mappings: Option[ImportTree.ImportMapping], - start: Int, - end: Int) { +case class ImportTree( + prefix: Seq[String], + mappings: Option[ImportTree.ImportMapping], + start: Int, + end: Int +) { lazy val strippedPrefix: Seq[String] = prefix.takeWhile(_(0) == '$').map(_.stripPrefix("$")) } -object ImportTree{ +object ImportTree { type ImportMapping = Seq[(String, Option[String])] } diff --git a/amm/util/src/main/scala/ammonite/util/PositionOffsetConversion.scala b/amm/util/src/main/scala/ammonite/util/PositionOffsetConversion.scala index 3b0795aff..81b0c1281 100644 --- a/amm/util/src/main/scala/ammonite/util/PositionOffsetConversion.scala +++ b/amm/util/src/main/scala/ammonite/util/PositionOffsetConversion.scala @@ -26,7 +26,7 @@ object PositionOffsetConversion { def isAtEndOfLine(idx: Int) = charAtIsEOL(idx) { case CR | LF => true - case _ => false + case _ => false } val buf = new mutable.ArrayBuffer[Int] @@ -114,18 +114,19 @@ object PositionOffsetConversion { private val firstLineWrapperPrefix = "/*