Ignored (scalac won't parse): 1287
Search state exploded: 56
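For context, a hedged sketch of how the buckets above can be reproduced for a single source string with the public scalafmt-core API. This assumes a recent org.scalafmt.Scalafmt.format that returns a Formatted value; the harness that produced this log may well use internal runners instead.

import org.scalafmt.{Error, Formatted, Scalafmt}

// Classify one file the way this report does: formatted cleanly,
// best-first search blew past its state budget, or some other failure.
def classify(code: String): String =
  Scalafmt.format(code) match {
    case Formatted.Success(_)                            => "formatted"
    case Formatted.Failure(_: Error.SearchStateExploded) => "SearchStateExploded"
    case Formatted.Failure(e)                            => "other: " + e.getClass.getSimpleName
  }

The 56 entries that follow are the files in the corpus that fell into the SearchStateExploded bucket.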
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/expressions/CastSuite.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream/src/main/scala/akka/stream/ActorMaterializer.scala
))
SearchStateExploded(ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/tools/shared/src/main/scala/org/scalajs/core/tools/linker/backend/emitter/JSDesugaring.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/tools/shared/src/main/scala/org/scalajs/core/tools/linker/backend/emitter/JSDesugaring.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/graphx/src/test/scala/org/apache/spark/graphx/GraphSuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ConstantFoldingSuite.scala
))
SearchStateExploded(ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/tools/shared/src/main/scala/org/scalajs/core/tools/linker/frontend/optimizer/OptimizerCore.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/tools/shared/src/main/scala/org/scalajs/core/tools/linker/frontend/optimizer/OptimizerCore.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream/src/main/scala/akka/stream/javadsl/Flow.scala
))
SearchStateExploded(ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/library/src/main/scala/scala/scalajs/js/Any.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/library/src/main/scala/scala/scalajs/js/Any.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/mllib/src/main/scala/org/apache/spark/ml/param/shared/SharedParamsCodeGen.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/mllib/src/main/scala/org/apache/spark/ml/param/shared/SharedParamsCodeGen.scala
))
SearchStateExploded(ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/library/src/main/scala/scala/scalajs/js/ThisFunction.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/library/src/main/scala/scala/scalajs/js/ThisFunction.scala
))
SearchStateExploded(ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/ScalaPsiUtil.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/ScalaPsiUtil.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor-tests/src/test/scala/akka/actor/SupervisorHierarchySpec.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream-tests/src/test/scala/akka/stream/DslFactoriesConsistencySpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream-tests/src/test/scala/akka/stream/DslFactoriesConsistencySpec.scala
))
SearchStateExploded(ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/project/JavaLangObject.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/project/JavaLangObject.scala
))
SearchStateExploded(ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/project/JavaLangString.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/project/JavaLangString.scala
))
SearchStateExploded(ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/ScalaJSPluginInternal.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/ScalaJSPluginInternal.scala
))
SearchStateExploded(ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/ScNamedElement.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/api/toplevel/ScNamedElement.scala
))
SearchStateExploded(ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/macros/impl/TypeDescriptorProviderImpl.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/macros/impl/TypeDescriptorProviderImpl.scala
))
SearchStateExploded(ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/typed/GeneratedFlattenGroup.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/typed/GeneratedFlattenGroup.scala
))
SearchStateExploded(ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/impl/base/patterns/ScTypedPatternImpl.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/impl/base/patterns/ScTypedPatternImpl.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowSuite.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream-tests/src/test/scala/akka/stream/scaladsl/GraphZipWithSpec.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/project/AkkaBuild.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/project/AkkaBuild.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/project/MiMa.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/project/MiMa.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/mllib/src/test/scala/org/apache/spark/mllib/clustering/KMeansSuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
))
SearchStateExploded(ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-db/src/test/scala/com/twitter/scalding/db/macros/MacrosUnitTests.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-db/src/test/scala/com/twitter/scalding/db/macros/MacrosUnitTests.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetPartitionDiscoverySuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetPartitionDiscoverySuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetSchemaSuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/mllib/src/test/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrixSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/mllib/src/test/scala/org/apache/spark/mllib/linalg/distributed/BlockMatrixSuite.scala
))
SearchStateExploded(ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/Boxed.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/Boxed.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/sources/TableScanSuite.scala
))
SearchStateExploded(ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/macros/impl/ordered_serialization/providers/ProductOrderedBuf.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/macros/impl/ordered_serialization/providers/ProductOrderedBuf.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/project/MimaExcludes.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/project/MimaExcludes.scala
))
SearchStateExploded(ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/nonvalue/ScMethodType.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/nonvalue/ScMethodType.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/project/SparkBuild.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/project/SparkBuild.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/repl/scala-2.10/src/main/scala/org/apache/spark/repl/SparkILoop.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveMetastoreCatalog.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/SparkConf.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/SparkConf.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/CreateTableAsSelect.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/main/scala/org/apache/spark/sql/hive/test/TestHive.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/hive/InsertIntoHiveTableSuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/WindowQuerySuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/WindowQuerySuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/streaming/src/test/java/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapperSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/streaming/src/test/java/org/apache/spark/streaming/api/java/JavaStreamingListenerWrapperSuite.scala
))
SearchStateExploded(ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/codeInspection/feature/LanguageFeatureInspection.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/codeInspection/feature/LanguageFeatureInspection.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisErrorSuite.scala
))
SearchStateExploded(ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ReceiverSchedulingPolicySuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/streaming/src/test/scala/org/apache/spark/streaming/scheduler/ReceiverSchedulingPolicySuite.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/RequestParserSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/RequestParserSpec.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/parsing/ResponseParserSpec.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/ResponseRendererSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/rendering/ResponseRendererSpec.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/MarshallingSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-tests/src/test/scala/akka/http/scaladsl/marshalling/MarshallingSpec.scala
))
SearchStateExploded(ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/completion/ScalaOverrideContributor.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/completion/ScalaOverrideContributor.scala
))
SearchStateExploded(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-remote/src/main/scala/akka/remote/transport/AkkaPduCodec.scala
))
scala.meta.parsers.ParseException (") expected but identifier found"): 1
ParseErr(ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor/src/main/scala/akka/event/Logging.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor/src/main/scala/akka/event/Logging.scala
))
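The single ParseErr above is raised by scalameta's parser, not by scalafmt itself. A hedged sketch of the same pre-check, using the standard scalameta parse[Source] API with its default dialect:

import scala.meta._

// Returns true when scalameta can parse the file; prints the parser
// message (e.g. ") expected but identifier found") when it cannot.
def parses(code: String): Boolean =
  code.parse[Source] match {
    case Parsed.Success(_) => true
    case Parsed.Error(_, message, _) =>
      println(s"ParseError: $message")
      false
  }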
org.scalafmt.Error$: 80
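The 80 entries below are unified diffs between two formatting runs of the same file. The log does not say whether the comparison is an idempotency check (format the output again) or a run against another scalafmt version; assuming the former, a minimal sketch of the check looks like this:

import org.scalafmt.{Formatted, Scalafmt}

def formatOnce(code: String): Option[String] =
  Scalafmt.format(code) match {
    case Formatted.Success(out) => Some(out)
    case Formatted.Failure(_)   => None
  }

// A file is flagged when the second pass changes the output again;
// the "-"/"+" hunks in this log would be the diff between the two passes.
def isIdempotent(code: String): Boolean =
  formatOnce(code) match {
    case Some(once) => formatOnce(once).contains(once)
    case None       => true // hard failures are counted in the sections above
  }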
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor/src/main/scala/akka/actor/LightArrayRevolverScheduler.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor/src/main/scala/akka/actor/LightArrayRevolverScheduler.scala
) org.scalafmt.Error$: .getInt("akka.scheduler.ticks-per-wheel")
- .requiring(ticks ⇒ (ticks & (ticks - 1)) == 0,
- "ticks-per-wheel must be a power of 2")
+ .requiring(ticks ⇒
+ (ticks & (ticks - 1)) == 0, "ticks-per-wheel must be a power of 2")
val TickDuration = config
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/graphx/src/main/scala/org/apache/spark/graphx/impl/VertexRDDImpl.scala
) org.scalafmt.Error$:
- override def innerJoin[U: ClassTag, VD2: ClassTag](other: RDD[
- (VertexId, U)])(
+ override def innerJoin[U: ClassTag, VD2: ClassTag](other: RDD[(VertexId, U)])(
f: (VertexId, VD, U) => VD2): VertexRDD[VD2] = {
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/graphx/src/main/scala/org/apache/spark/graphx/lib/SVDPlusPlus.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/graphx/src/main/scala/org/apache/spark/graphx/lib/SVDPlusPlus.scala
) org.scalafmt.Error$: */
- def run(edges: RDD[Edge[Double]], conf: Conf)
- : (Graph[(Array[Double], Array[Double], Double, Double), Double],
- Double) = {
+ def run(edges: RDD[Edge[Double]],
+ conf: Conf): (Graph[(Array[Double], Array[Double], Double, Double),
+ Double],
+ Double) = {
require(conf.maxIters > 0,
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/graphx/src/test/scala/org/apache/spark/graphx/PregelSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/graphx/src/test/scala/org/apache/spark/graphx/PregelSuite.scala
) org.scalafmt.Error$: .fromEdgeTuples(sc.parallelize((1 until n).map(x =>
- (x: VertexId, x + 1: VertexId)),
- 3),
+ (x: VertexId, x + 1: VertexId)), 3),
0)
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/ColumnPruningSuite.scala
) org.scalafmt.Error$: val query = Project('b :: Nil, Union(input1 :: input2 :: Nil)).analyze
- val expected = Project(
- 'b :: Nil,
- Union(
- Project('b :: Nil, input1) :: Project('d :: Nil,
- input2) :: Nil)).analyze
+ val expected = Project('b :: Nil,
+ Union(
+ Project('b :: Nil, input1) :: Project(
+ 'd :: Nil,
+ input2) :: Nil)).analyze
comparePlans(Optimize.execute(query), expected)
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LimitPushdownSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/optimizer/LimitPushdownSuite.scala
) org.scalafmt.Error$: val unionOptimized = Optimize.execute(unionQuery.analyze)
- val unionCorrectAnswer =
- Limit(2,
- Union(testRelation.limit(1),
- testRelation2.select('d).limit(1))).analyze
+ val unionCorrectAnswer = Limit(2,
+ Union(testRelation.limit(1),
+ testRelation2
+ .select('d)
+ .limit(1))).analyze
comparePlans(unionOptimized, unionCorrectAnswer)
ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/js-envs/src/main/scala/org/scalajs/jsenv/rhino/RhinoJSEnv.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/js-envs/src/main/scala/org/scalajs/jsenv/rhino/RhinoJSEnv.scala
) org.scalafmt.Error$: for (channel <- optChannel) {
- comEventLoop(taskQ,
- channel,
- () => recvCallback.get,
- () => recvCallback.isDefined)
+ comEventLoop(taskQ, channel, () => recvCallback.get, () =>
+ recvCallback.isDefined)
}
@@ -317,5 +315,4 @@
- val task = new TimeoutTask(
- deadline,
- () => cb.call(context, scope, scope, args.slice(2, args.length)))
+ val task = new TimeoutTask(deadline, () =>
+ cb.call(context, scope, scope, args.slice(2, args.length)))
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala
) org.scalafmt.Error$: @deprecated("This method is deprecated", "0.1.6")
- def this(path: String,
- sourceVersion: Option[Long],
- sinkVersion: Option[Long],
- maxFailures: Int)(
- implicit @transient codec: Injection[(K, V),
- (Array[Byte],
- Array[Byte])]) =
+ def this(
+ path: String,
+ sourceVersion: Option[Long],
+ sinkVersion: Option[Long],
+ maxFailures: Int)(implicit @transient codec: Injection[(K, V),
+ (Array[Byte],
+ Array[Byte])]) =
this(path,
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/main/scala/org/apache/spark/sql/KeyValueGroupedDataset.scala
) org.scalafmt.Error$: def keys: Dataset[K] = {
- Dataset[K](sqlContext, Distinct(Project(groupingAttributes, logicalPlan)))
+ Dataset[K](sqlContext,
+ Distinct(Project(groupingAttributes, logicalPlan)))
}
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/mllib/src/main/scala/org/apache/spark/ml/recommendation/ALS.scala
) org.scalafmt.Error$: case (activeIndices, dstBlockId) =>
- (dstBlockId,
- (srcBlockId, activeIndices.map(idx => srcFactors(idx))))
+ (dstBlockId, (srcBlockId, activeIndices.map(idx =>
+ srcFactors(idx))))
}
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor-tests/src/test/scala/akka/actor/ActorDSLSpec.scala
) org.scalafmt.Error$: import system.dispatcher
- val res = Future.sequence(Seq(Future { i.receive() } recover {
- case x ⇒ x
- }, Future {
+ val res = Future.sequence(
+ Seq(Future { i.receive() } recover { case x ⇒ x }, Future {
Thread.sleep(100); i.select() { case "world" ⇒ 1 }
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream/src/main/scala/akka/stream/scaladsl/Graph.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream/src/main/scala/akka/stream/scaladsl/Graph.scala
) org.scalafmt.Error$: val dispatchL = dispatch(_: T, other)
- val passR = () ⇒
- emit(out, other, () ⇒ {
- nullOut(); passAlong(right, out, doPull = true)
- })
+ val passR =
+ () ⇒
+ emit(out, other, () ⇒ {
+ nullOut(); passAlong(right, out, doPull = true)
+ })
val passL = () ⇒
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/GeneratedConversions.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/GeneratedConversions.scala
) org.scalafmt.Error$: Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]] =
- new TupleConverter[
- Tuple21[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T,
- U]] {
+ new TupleConverter[Tuple21[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T,
+ U]] {
def apply(te: TupleEntry) = {
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/GeneratedMappable.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/GeneratedMappable.scala
) org.scalafmt.Error$: Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] {
- def converter[
- Z >: Tuple20[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T]] =
+ def converter[Z >: Tuple20[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T]] =
TupleConverter.asSuperConverter(
@@ -184,24 +183,23 @@
trait Mappable21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]
- extends Mappable[
- Tuple21[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T,
- U]] {
+ extends Mappable[Tuple21[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T,
+ U]] {
def converter[
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/api/InferUtil.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/api/InferUtil.scala
) org.scalafmt.Error$: None)
- ScTypePolymorphicType(retType, typeParams.map(tp => {
- var lower = tp.lowerType()
- var upper = tp.upperType()
- def hasRecursiveTypeParameters(typez: ScType): Boolean = {
- var hasRecursiveTypeParameters = false
- typez.recursiveUpdate {
- case tpt: ScTypeParameterType =>
- typeParams.find(
- tp =>
- (tp.name,
- ScalaPsiUtil
- .getPsiElementId(tp.ptp)) == (tpt.name, tpt.getId)) match {
- case None => (true, tpt)
- case _ =>
- hasRecursiveTypeParameters = true
- (true, tpt)
+ ScTypePolymorphicType(
+ retType,
+ typeParams
+ .map(tp => {
+ var lower = tp.lowerType()
+ var upper = tp.upperType()
+ def hasRecursiveTypeParameters(typez: ScType): Boolean = {
+ var hasRecursiveTypeParameters = false
+ typez.recursiveUpdate {
+ case tpt: ScTypeParameterType =>
+ typeParams
+ .find(
+ tp =>
+ (tp.name,
+ ScalaPsiUtil
+ .getPsiElementId(tp.ptp)) == (tpt.name, tpt.getId)) match {
+ case None => (true, tpt)
+ case _ =>
+ hasRecursiveTypeParameters = true
+ (true, tpt)
+ }
+ case tp: ScType => (hasRecursiveTypeParameters, tp)
+ }
+ hasRecursiveTypeParameters
}
- case tp: ScType => (hasRecursiveTypeParameters, tp)
- }
- hasRecursiveTypeParameters
- }
- subst.lMap.get((tp.name,
- ScalaPsiUtil.getPsiElementId(tp.ptp))) match {
- case Some(_addLower) =>
- val substedLowerType = unSubst.subst(lower)
- val addLower =
- if (tp.typeParams.nonEmpty && !_addLower
- .isInstanceOf[ScParameterizedType] &&
- !tp.typeParams.exists(_.name == "_"))
- ScParameterizedType(
- _addLower,
- tp.typeParams.map(
- ScTypeParameterType.toTypeParameterType))
- else _addLower
- if (hasRecursiveTypeParameters(substedLowerType))
- lower = addLower
- else lower = Bounds.lub(substedLowerType, addLower)
- case None =>
- lower = unSubst.subst(lower)
- }
- subst.rMap.get((tp.name,
- ScalaPsiUtil.getPsiElementId(tp.ptp))) match {
- case Some(_addUpper) =>
- val substedUpperType = unSubst.subst(upper)
- val addUpper =
- if (tp.typeParams.nonEmpty && !_addUpper
- .isInstanceOf[ScParameterizedType] &&
- !tp.typeParams.exists(_.name == "_"))
- ScParameterizedType(
- _addUpper,
- tp.typeParams.map(
- ScTypeParameterType.toTypeParameterType))
- else _addUpper
- if (hasRecursiveTypeParameters(substedUpperType))
- upper = addUpper
- else upper = Bounds.glb(substedUpperType, addUpper)
- case None =>
- upper = unSubst.subst(upper)
- }
+ subst.lMap.get((tp.name,
+ ScalaPsiUtil
+ .getPsiElementId(tp.ptp))) match {
+ case Some(_addLower) =>
+ val substedLowerType = unSubst.subst(lower)
+ val addLower =
+ if (tp.typeParams.nonEmpty && !_addLower
+ .isInstanceOf[ScParameterizedType] &&
+ !tp.typeParams.exists(_.name == "_"))
+ ScParameterizedType(
+ _addLower,
+ tp.typeParams.map(
+ ScTypeParameterType.toTypeParameterType))
+ else _addLower
+ if (hasRecursiveTypeParameters(substedLowerType))
+ lower = addLower
+ else lower = Bounds.lub(substedLowerType, addLower)
+ case None =>
+ lower = unSubst.subst(lower)
+ }
+ subst.rMap.get((tp.name,
+ ScalaPsiUtil
+ .getPsiElementId(tp.ptp))) match {
+ case Some(_addUpper) =>
+ val substedUpperType = unSubst.subst(upper)
+ val addUpper =
+ if (tp.typeParams.nonEmpty && !_addUpper
+ .isInstanceOf[ScParameterizedType] &&
+ !tp.typeParams.exists(_.name == "_"))
+ ScParameterizedType(
+ _addUpper,
+ tp.typeParams.map(
+ ScTypeParameterType.toTypeParameterType))
+ else _addUpper
+ if (hasRecursiveTypeParameters(substedUpperType))
+ upper = addUpper
+ else upper = Bounds.glb(substedUpperType, addUpper)
+ case None =>
+ upper = unSubst.subst(upper)
+ }
- if (safeCheck && !undefiningSubstitutor
- .subst(lower)
- .conforms(undefiningSubstitutor.subst(upper),
- checkWeak = true))
- throw new SafeCheckException
- TypeParameter(tp.name,
- tp.typeParams /* doesn't important here */,
- () => lower,
- () => upper,
- tp.ptp)
- }))
+ if (safeCheck && !undefiningSubstitutor
+ .subst(lower)
+ .conforms(undefiningSubstitutor.subst(upper),
+ checkWeak = true))
+ throw new SafeCheckException
+ TypeParameter(tp.name,
+ tp.typeParams /* doesn't important here */,
+ () => lower,
+ () => upper,
+ tp.ptp)
+ }))
} else {
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/GeneratedTupleAdders.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/GeneratedTupleAdders.scala
) org.scalafmt.Error$: class Tuple20Adder[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S,
- T](
- tup: Tuple20[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T]) {
+ T](tup: Tuple20[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T]) {
def :+[U](other: U) = {
@@ -4975,23 +4974,22 @@
S,
- T](
- tup: Tuple20[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T]) = new Tuple20Adder(tup)
+ T](tup: Tuple20[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T]) = new Tuple20Adder(tup)
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-actor-tests/src/test/scala/akka/dispatch/FutureSpec.scala
) org.scalafmt.Error$: (intercept[java.lang.Exception] {
- Await.result(
- future flatMap (_ ⇒ Promise.successful[Any]("foo").future),
- timeout.duration)
+ Await.result(future flatMap (_ ⇒
+ Promise.successful[Any]("foo").future), timeout.duration)
}).getMessage should ===(message))
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSupervisionSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-stream-tests/src/test/scala/akka/stream/impl/fusing/InterpreterSupervisionSpec.scala
) org.scalafmt.Error$: "resume when Scan throws" in new OneBoundedSetup[Int](
- Seq(Scan(1,
- (acc: Int, x: Int) ⇒ if (x == 10) throw TE else acc + x,
- resumingDecider))) {
+ Seq(Scan(1, (acc: Int, x: Int) ⇒
+ if (x == 10) throw TE else acc + x, resumingDecider))) {
downstream.requestOne()
@@ -359,5 +358,4 @@
"restart when Scan throws" in new OneBoundedSetup[Int](
- Seq(Scan(1,
- (acc: Int, x: Int) ⇒ if (x == 10) throw TE else acc + x,
- restartingDecider))) {
+ Seq(Scan(1, (acc: Int, x: Int) ⇒
+ if (x == 10) throw TE else acc + x, restartingDecider))) {
downstream.requestOne()
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/CatalystReadSupport.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/parquet/CatalystReadSupport.scala
) org.scalafmt.Error$: case t: MapType =>
- t.copy(keyType = expand(t.keyType), valueType = expand(t.valueType))
+ t.copy(keyType = expand(t.keyType),
+ valueType = expand(t.valueType))
ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/scalalib/overrides-2.10/scala/collection/immutable/Range.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/scalalib/overrides-2.10/scala/collection/immutable/Range.scala
) org.scalafmt.Error$: */
- final override def drop(n: Int): Range =
- (if (n <= 0 || isEmpty) this
- else if (n >= numRangeElements)
- newEmptyRange(end)
- else
- copy(locationAfterN(n),
- end,
- step))
+ final override def drop(n: Int): Range = (if (n <= 0 || isEmpty) this
+ else if (n >= numRangeElements)
+ newEmptyRange(end)
+ else
+ copy(locationAfterN(n),
+ end,
+ step))
ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/scalalib/overrides-2.11/scala/collection/immutable/Range.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/scalalib/overrides-2.11/scala/collection/immutable/Range.scala
) org.scalafmt.Error$: */
- final override def drop(n: Int): Range =
- (if (n <= 0 || isEmpty) this
- else if (n >= numRangeElements && numRangeElements >= 0)
- newEmptyRange(end)
- else {
- // May have more than Int.MaxValue elements (numRangeElements < 0)
- // but the logic is the same either way: go forwards n steps, keep the rest
- copy(locationAfterN(n),
- end,
- step)
- })
+ final override def drop(n: Int): Range = (if (n <= 0 || isEmpty) this
+ else if (n >= numRangeElements && numRangeElements >= 0)
+ newEmptyRange(end)
+ else {
+ // May have more than Int.MaxValue elements (numRangeElements < 0)
+ // but the logic is the same either way: go forwards n steps, keep the rest
+ copy(locationAfterN(n),
+ end,
+ step)
+ })
ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/scalalib/overrides-2.12/scala/collection/immutable/Range.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/scalalib/overrides-2.12/scala/collection/immutable/Range.scala
) org.scalafmt.Error$: */
- final override def drop(n: Int): Range =
- (if (n <= 0 || isEmpty) this
- else if (n >= numRangeElements && numRangeElements >= 0)
- newEmptyRange(end)
- else {
- // May have more than Int.MaxValue elements (numRangeElements < 0)
- // but the logic is the same either way: go forwards n steps, keep the rest
- copy(locationAfterN(n),
- end,
- step)
- })
+ final override def drop(n: Int): Range = (if (n <= 0 || isEmpty) this
+ else if (n >= numRangeElements && numRangeElements >= 0)
+ newEmptyRange(end)
+ else {
+ // May have more than Int.MaxValue elements (numRangeElements < 0)
+ // but the logic is the same either way: go forwards n steps, keep the rest
+ copy(locationAfterN(n),
+ end,
+ step)
+ })
ScalaFile(
project: scala-js
github: https://github.com/scala-js/scala-js/blob/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/test-interface/src/main/scala/org/scalajs/testinterface/internal/BridgeBase.scala
raw: https://raw.githubusercontent.com/scala-js/scala-js/8663c8060ca96a51bfc1f87f2f3f30babbeadbc3/test-interface/src/main/scala/org/scalajs/testinterface/internal/BridgeBase.scala
) org.scalafmt.Error$: tasks.map { task =>
- val serTask = runner.serializeTask(
- task,
- taskDef => js.JSON.stringify(TaskDefSerializer.serialize(taskDef)))
+ val serTask = runner.serializeTask(task, taskDef =>
+ js.JSON.stringify(TaskDefSerializer.serialize(taskDef)))
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/typed/GeneratedTypedSource.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/main/scala/com/twitter/scalding/typed/GeneratedTypedSource.scala
) org.scalafmt.Error$: Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] {
- def converter[
- Z >: Tuple20[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T]] =
+ def converter[Z >: Tuple20[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T]] =
TupleConverter.asSuperConverter(
@@ -187,24 +186,23 @@
A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]
- extends TypedSource[
- Tuple21[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T,
- U]] {
+ extends TypedSource[Tuple21[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T,
+ U]] {
def converter[
@@ -461,23 +459,22 @@
Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T]] {
- final def setter[
- Z <: Tuple20[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T]] =
+ final def setter[Z <: Tuple20[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T]] =
TupleSetter.asSubSetter(
@@ -507,24 +504,23 @@
A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U]
- extends TypedSink[
- Tuple21[A,
- B,
- C,
- D,
- E,
- F,
- G,
- H,
- I,
- J,
- K,
- L,
- M,
- N,
- O,
- P,
- Q,
- R,
- S,
- T,
- U]] {
+ extends TypedSink[Tuple21[A,
+ B,
+ C,
+ D,
+ E,
+ F,
+ G,
+ H,
+ I,
+ J,
+ K,
+ L,
+ M,
+ N,
+ O,
+ P,
+ Q,
+ R,
+ S,
+ T,
+ U]] {
final def setter[
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScGenericCallImpl.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/impl/expr/ScGenericCallImpl.scala
) org.scalafmt.Error$: case ScTypePolymorphicType(int, tps) =>
- val subst = ScalaPsiUtil.genericCallSubstitutor(
- tps.map(p => (p.name, ScalaPsiUtil.getPsiElementId(p.ptp))),
- this)
+ val subst = ScalaPsiUtil.genericCallSubstitutor(tps.map(p =>
+ (p.name, ScalaPsiUtil.getPsiElementId(p.ptp))), this)
Success(subst.subst(int), Some(this))
@@ -103,5 +102,4 @@
case ScTypePolymorphicType(int, tps) =>
- val subst = ScalaPsiUtil.genericCallSubstitutor(
- tps.map(p => (p.name, ScalaPsiUtil.getPsiElementId(p.ptp))),
- this)
+ val subst = ScalaPsiUtil.genericCallSubstitutor(tps.map(p =>
+ (p.name, ScalaPsiUtil.getPsiElementId(p.ptp))), this)
Success(subst.subst(int), Some(this))
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/test/scala/com/twitter/scalding/CoreTest.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/test/scala/com/twitter/scalding/CoreTest.scala
) org.scalafmt.Error$: JobTest(new SortingJob(_))
- .source(Tsv("in", ('x, 'y, 'z)),
- (1 to 100).map(i => (i, i * i % 5, i * i * i)))
+ .source(Tsv("in", ('x, 'y, 'z)), (1 to 100).map(i =>
+ (i, i * i % 5, i * i * i)))
.sink[(Int, Int, Int)](Tsv("output")) { outBuf =>
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/TypeDefinitionMembers.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/impl/toplevel/typedef/TypeDefinitionMembers.scala
) org.scalafmt.Error$: addSignature(
- new Signature("set" + dcl.name.capitalize,
- Seq(() => t),
- 1,
- subst,
- dcl))
+ new Signature("set" + dcl.name.capitalize, Seq(() =>
+ t), 1, subst, dcl))
}
@@ -532,7 +529,4 @@
addSignature(
- new Signature("set" + param.name.capitalize,
- Seq(() => t),
- 1,
- subst,
- param))
+ new Signature("set" + param.name.capitalize, Seq(() =>
+ t), 1, subst, param))
}
@@ -547,7 +541,4 @@
addSignature(
- new Signature("set" + param.name.capitalize,
- Seq(() => t),
- 1,
- subst,
- param))
+ new Signature("set" + param.name.capitalize, Seq(() =>
+ t), 1, subst, param))
}
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/test/scala/com/twitter/scalding/TypedPipeTest.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-core/src/test/scala/com/twitter/scalding/TypedPipeTest.scala
) org.scalafmt.Error$: generateInput(1000, 100, dist))
- .source(TypedText.tsv[(Int, Int)]("input1"),
- generateInput(100, 100, x => 1))
+ .source(TypedText.tsv[(Int, Int)]("input1"), generateInput(100, 100, x =>
+ 1))
.typedSink(TypedText.tsv[(Int, Int, Int)]("output-sketch")) { outBuf =>
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/implicits/ImplicitCollector.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/implicits/ImplicitCollector.scala
) org.scalafmt.Error$: if (fullInfo)
- return (candidates.toSeq.map(c =>
- forMap(c,
- withLocalTypeInference = false,
- checkFast = false)) ++
+ return (candidates.toSeq
+ .map(c =>
+ forMap(c,
+ withLocalTypeInference = false,
+ checkFast = false)) ++
candidates.toSeq.map(c =>
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-typed/src/main/scala/akka/typed/patterns/Receiver.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-typed/src/main/scala/akka/typed/patterns/Receiver.scala
) org.scalafmt.Error$: SynchronousSelf { syncself ⇒
- Or(empty(ctx).widen {
- case c: Command[t] ⇒ c.asInstanceOf[Command[T]]
- }, Static[Any] {
- case msg ⇒ syncself ! Enqueue(msg)
- })
+ Or(
+ empty(ctx).widen {
+ case c: Command[t] ⇒ c.asInstanceOf[Command[T]]
+ },
+ Static[Any] {
+ case msg ⇒ syncself ! Enqueue(msg)
+ })
}
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-typed/src/test/scala/akka/typed/ActorContextSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-typed/src/test/scala/akka/typed/ActorContextSpec.scala
) org.scalafmt.Error$: def `04 must stop a child actor`(): Unit =
- sync(setup("ctx04") { (ctx, startWith) ⇒
- val self = ctx.self
- startWith
- .mkChild(Some("A"),
- ctx.spawnAdapter(ChildEvent),
- self,
- inert = true) {
- case (subj, child) ⇒
- subj ! Kill(child, self)
- child
- }
- .expectMessageKeep(500.millis) { (msg, child) ⇒
- msg should ===(Killed)
- ctx.watch(child)
- }
- .expectTermination(500.millis) { (t, child) ⇒
- t.ref should ===(child)
- }
+ sync(setup("ctx04") {
+ (ctx, startWith) ⇒
+ val self = ctx.self
+ startWith
+ .mkChild(Some("A"),
+ ctx.spawnAdapter(ChildEvent),
+ self,
+ inert = true) {
+ case (subj, child) ⇒
+ subj ! Kill(child, self)
+ child
+ }
+ .expectMessageKeep(500.millis) { (msg, child) ⇒
+ msg should ===(Killed)
+ ctx.watch(child)
+ }
+ .expectTermination(500.millis) { (t, child) ⇒
+ t.ref should ===(child)
+ }
})
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/BaseTypes.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/BaseTypes.scala
) org.scalafmt.Error$: if (visitedAliases.contains(ta)) return Seq.empty
- val genericSubst = ScalaPsiUtil.typesCallSubstitutor(
- ta.typeParameters.map(tp =>
- (tp.name, ScalaPsiUtil.getPsiElementId(tp))),
- args)
+ val genericSubst =
+ ScalaPsiUtil.typesCallSubstitutor(ta.typeParameters.map(tp =>
+ (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
BaseTypes.get(
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-hadoop-test/src/test/scala/com/twitter/scalding/reducer_estimation/RatioBasedEstimatorTest.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-hadoop-test/src/test/scala/com/twitter/scalding/reducer_estimation/RatioBasedEstimatorTest.scala
) org.scalafmt.Error$: // all entries below the 10% threshold for past input size
- Success(Seq(makeHistory(10, 1), makeHistory(10, 1), makeHistory(10, 1)))
+ Success(
+ Seq(makeHistory(10, 1),
+ makeHistory(10, 1),
+ makeHistory(10, 1)))
}
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/Conformance.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/Conformance.scala
) org.scalafmt.Error$: val uBound = subst.subst(upper)
- val genericSubst = ScalaPsiUtil.typesCallSubstitutor(
- a.typeParameters.map(tp =>
- (tp.name, ScalaPsiUtil.getPsiElementId(tp))),
- args)
+ val genericSubst =
+ ScalaPsiUtil.typesCallSubstitutor(a.typeParameters.map(tp =>
+ (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
val s = subst.followed(genericSubst)
@@ -375,6 +374,5 @@
}
- val genericSubst = ScalaPsiUtil.typesCallSubstitutor(
- a.typeParameters.map(tp =>
- (tp.name, ScalaPsiUtil.getPsiElementId(tp))),
- args)
+ val genericSubst =
+ ScalaPsiUtil.typesCallSubstitutor(a.typeParameters.map(tp =>
+ (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
result = conformsInner(l,
@@ -1120,6 +1118,5 @@
val lBound = subst.subst(lower)
- val genericSubst = ScalaPsiUtil.typesCallSubstitutor(
- a.typeParameters.map(tp =>
- (tp.name, ScalaPsiUtil.getPsiElementId(tp))),
- args)
+ val genericSubst =
+ ScalaPsiUtil.typesCallSubstitutor(a.typeParameters.map(tp =>
+ (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
val s = subst.followed(genericSubst)
@@ -1145,6 +1142,5 @@
val lBound = lower
- val genericSubst = ScalaPsiUtil.typesCallSubstitutor(
- a.typeParameters.map(tp =>
- (tp.name, ScalaPsiUtil.getPsiElementId(tp))),
- args)
+ val genericSubst =
+ ScalaPsiUtil.typesCallSubstitutor(a.typeParameters.map(tp =>
+ (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
result = conformsInner(genericSubst.subst(lBound),
@@ -1993,27 +1989,28 @@
val subst =
- new ScSubstitutor(
- new collection.immutable.HashMap[(String, PsiElement),
- ScType] ++ typeParameters1
- .zip(typeParameters2)
- .map({ tuple =>
- ((tuple._1.name,
- ScalaPsiUtil.getPsiElementId(tuple._1.ptp)),
- new ScTypeParameterType(tuple._2.name,
- tuple._2.ptp match {
- case p: ScTypeParam =>
- p.typeParameters.toList.map {
- new ScTypeParameterType(
- _,
- ScSubstitutor.empty)
- }
- case _ => Nil
- },
- new Suspension(
- tuple._2.lowerType),
- new Suspension(
- tuple._2.upperType),
- tuple._2.ptp))
- }),
- Map.empty,
- None)
+ new ScSubstitutor(new collection.immutable.HashMap[
+ (String, PsiElement),
+ ScType] ++ typeParameters1
+ .zip(typeParameters2)
+ .map({
+ tuple =>
+ ((tuple._1.name,
+ ScalaPsiUtil.getPsiElementId(
+ tuple._1.ptp)),
+ new ScTypeParameterType(
+ tuple._2.name,
+ tuple._2.ptp match {
+ case p: ScTypeParam =>
+ p.typeParameters.toList.map {
+ new ScTypeParameterType(
+ _,
+ ScSubstitutor.empty)
+ }
+ case _ => Nil
+ },
+ new Suspension(tuple._2.lowerType),
+ new Suspension(tuple._2.upperType),
+ tuple._2.ptp))
+ }),
+ Map.empty,
+ None)
val t = conformsInner(subst.subst(internalType1),
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/ScCompoundType.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/ScCompoundType.scala
) org.scalafmt.Error$: varType.getOrAny))
- signatureMapVal += ((new Signature(e.name + "_=",
- Seq(() => varType.getOrAny),
- 1,
- subst,
- e),
+ signatureMapVal += ((new Signature(e.name + "_=", Seq(() =>
+ varType.getOrAny), 1, subst, e),
psi.types.Unit)) //setter
ScalaFile(
project: twitter
github: https://github.com/twitter/scalding/blob/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/Reader.scala
raw: https://raw.githubusercontent.com/twitter/scalding/7759d9c399896fc4c54be0f2406e047e030d8586/scalding-serialization/src/main/scala/com/twitter/scalding/serialization/Reader.scala
) org.scalafmt.Error$:
- implicit def array[
- @specialized(Boolean,
- Byte,
- Short,
- Int,
- Long,
- Float,
- Double) T: Reader: ClassTag]: Reader[Array[T]] =
+ implicit def array[@specialized(
+ Boolean,
+ Byte,
+ Short,
+ Int,
+ Long,
+ Float,
+ Double) T: Reader: ClassTag]: Reader[Array[T]] =
new Reader[Array[T]] {
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/ScParameterizedType.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/ScParameterizedType.scala
) org.scalafmt.Error$: case ScParameterizedType(ScDesignatorType(ta: ScTypeAlias), args) =>
- val genericSubst = ScalaPsiUtil.typesCallSubstitutor(
- ta.typeParameters.map(tp =>
- (tp.name, ScalaPsiUtil.getPsiElementId(tp))),
- args)
+ val genericSubst =
+ ScalaPsiUtil.typesCallSubstitutor(ta.typeParameters.map(tp =>
+ (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
Some(
@@ -116,6 +115,5 @@
val subst: ScSubstitutor = p.actualSubst
- val genericSubst = ScalaPsiUtil.typesCallSubstitutor(
- ta.typeParameters.map(tp =>
- (tp.name, ScalaPsiUtil.getPsiElementId(tp))),
- args)
+ val genericSubst =
+ ScalaPsiUtil.typesCallSubstitutor(ta.typeParameters.map(tp =>
+ (tp.name, ScalaPsiUtil.getPsiElementId(tp))), args)
val s = subst.followed(genericSubst)
@@ -418,13 +416,12 @@
case _ =>
- new Suspension[ScType]({
- () =>
- s.subst(
- ScCompoundType(ptp.getExtendsListTypes
+ new Suspension[ScType]({ () =>
+ s.subst(
+ ScCompoundType(ptp.getExtendsListTypes
+ .map(ScType.create(_, ptp.getProject))
+ .toSeq ++
+ ptp.getImplementsListTypes
.map(ScType.create(_, ptp.getProject))
- .toSeq ++
- ptp.getImplementsListTypes
- .map(ScType.create(_, ptp.getProject))
- .toSeq,
- Map.empty,
- Map.empty))
+ .toSeq,
+ Map.empty,
+ Map.empty))
})
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/core/src/test/scala/org/apache/spark/sql/jdbc/JDBCSuite.scala
) org.scalafmt.Error$: assert(
- doCompileFilter(
- LessThan("col3",
- Timestamp.valueOf("1995-11-21 00:00:00.0"))) === "col3 < '1995-11-21 00:00:00.0'")
+ doCompileFilter(LessThan(
+ "col3",
+ Timestamp.valueOf("1995-11-21 00:00:00.0"))) === "col3 < '1995-11-21 00:00:00.0'")
assert(
ScalaFile(
project: lihaoyi
github: https://github.com/lihaoyi/fastparse/blob/b445fb5c7003983cec379a4a3b452a9adc3066eb/scalaparse/jvm/src/test/resources/scalaparse/GenJSCode.scala
raw: https://raw.githubusercontent.com/lihaoyi/fastparse/b445fb5c7003983cec379a4a3b452a9adc3066eb/scalaparse/jvm/src/test/resources/scalaparse/GenJSCode.scala
) org.scalafmt.Error$: case List(keyArg, valueArg) =>
- js.Assign(js.JSBracketSelect(receiver, keyArg), valueArg)
+ js.Assign(js.JSBracketSelect(receiver, keyArg),
+ valueArg)
}
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePsiTypeBridge.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/psi/types/ScTypePsiTypeBridge.scala
) org.scalafmt.Error$: if (typeParameters.length > 0) {
- ScParameterizedType(
- res,
- typeParameters.map(ptp =>
+ ScParameterizedType(res, typeParameters.map(ptp =>
new ScTypeParameterType(ptp, ScSubstitutor.empty)))
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/compiler-settings/src/org/jetbrains/jps/incremental/scala/remote/play/SbtWatcherMain.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/compiler-settings/src/org/jetbrains/jps/incremental/scala/remote/play/SbtWatcherMain.scala
) org.scalafmt.Error$: def write2source(message: String) {
- out.write(
- Base64Converter
- .encode(
- MessageEvent(BuildMessage.Kind.INFO,
- message,
- None,
- None,
- None).toBytes)
- .getBytes)
+ out
+ .write(
+ Base64Converter
+ .encode(
+ MessageEvent(BuildMessage.Kind.INFO,
+ message,
+ None,
+ None,
+ None).toBytes)
+ .getBytes)
}
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/PackageObjectsData.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/jps-plugin/src/org/jetbrains/jps/incremental/scala/local/PackageObjectsData.scala
) org.scalafmt.Error$: def add(baseSource: File, packageObject: File): Unit = synchronized {
- baseSourceToPackageObjects.update(
- baseSource,
- baseSourceToPackageObjects.getOrElse(baseSource,
- HashSet.empty) + packageObject)
- packageObjectToBaseSources.update(
- packageObject,
- packageObjectToBaseSources.getOrElse(packageObject,
- HashSet.empty) + baseSource)
+ baseSourceToPackageObjects.update(baseSource,
+ baseSourceToPackageObjects.getOrElse(
+ baseSource,
+ HashSet.empty) + packageObject)
+ packageObjectToBaseSources.update(packageObject,
+ packageObjectToBaseSources.getOrElse(
+ packageObject,
+ HashSet.empty) + baseSource)
}
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodHandler.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/refactoring/extractMethod/ScalaExtractMethodHandler.scala
) org.scalafmt.Error$: def insertMethodCall() =
- ScalaExtractMethodUtils.replaceWithMethodCall(settings,
- settings.elements,
- param => param.oldName,
- output => output.paramName)
+ ScalaExtractMethodUtils
+ .replaceWithMethodCall(settings, settings.elements, param =>
+ param.oldName, output => output.paramName)
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/Main.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/jps-plugin/src/org/jetbrains/jps/incremental/scala/remote/Main.scala
) org.scalafmt.Error$: val encode = Base64Converter.encode(event.toBytes)
- out.write(
- (if (standalone && !encode.endsWith("=")) encode + "="
- else encode).getBytes)
+ out.write((if (standalone && !encode.endsWith("=")) encode + "="
+ else encode).getBytes)
}
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterUsageProcessor.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/refactoring/introduceParameter/ScalaIntroduceParameterUsageProcessor.scala
) org.scalafmt.Error$:
- ScalaRefactoringUtil.replaceOccurences(
- textRangeUsages.map(usage => TextRange.create(usage.range)),
- text,
- file)
+ ScalaRefactoringUtil.replaceOccurences(textRangeUsages.map(usage =>
+ TextRange.create(usage.range)), text, file)
textRangeUsages.foreach(_.processed = true)
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveInspectorSuite.scala
) org.scalafmt.Error$: MapType(IntegerType, IntegerType)) ::
- Literal.create(
- Row(1, 2.0d, 3.0f),
- StructType(
- StructField("c1", IntegerType) ::
- StructField("c2", DoubleType) ::
- StructField("c3",
- FloatType) :: Nil)) ::
+ Literal.create(Row(1, 2.0d, 3.0f),
+ StructType(
+ StructField("c1",
+ IntegerType) ::
+ StructField("c2",
+ DoubleType) ::
+ StructField(
+ "c3",
+ FloatType) :: Nil)) ::
Nil
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RangeDirectivesExamplesSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-docs/rst/scala/code/docs/http/scaladsl/server/directives/RangeDirectivesExamplesSpec.scala
) org.scalafmt.Error$: // above to make sure we get two BodyParts
- Get() ~> addHeader(
- Range(ByteRange(0, 1),
- ByteRange(1, 2),
- ByteRange(6, 7))) ~> route ~> check {
+ Get() ~> addHeader(Range(ByteRange(0, 1),
+ ByteRange(1, 2),
+ ByteRange(6, 7))) ~> route ~> check {
headers.collectFirst { case `Content-Range`(_, _) => true } shouldBe None
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/deploy/Client.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/deploy/Client.scala
) org.scalafmt.Error$: val javaOpts = sparkJavaOpts ++ extraJavaOpts
- val command = new Command(
- mainClass,
- Seq("{{WORKER_URL}}",
- "{{USER_JAR}}",
- driverArgs.mainClass) ++ driverArgs.driverOptions,
- sys.env,
- classPathEntries,
- libraryPathEntries,
- javaOpts)
+ val command =
+ new Command(mainClass,
+ Seq("{{WORKER_URL}}",
+ "{{USER_JAR}}",
+ driverArgs.mainClass) ++ driverArgs.driverOptions,
+ sys.env,
+ classPathEntries,
+ libraryPathEntries,
+ javaOpts)
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameSuperMembersUtil.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/refactoring/rename/RenameSuperMembersUtil.scala
) org.scalafmt.Error$: classes.last)
- val additional =
- if (oneSuperClass) Nil
- else Seq((renameAllMarkerObject, null)) //option for rename all
+ val additional = if (oneSuperClass) Nil
+ else Seq((renameAllMarkerObject, null)) //option for rename all
val classesToNamed = additional ++: Map(classes.zip(allElements): _*)
@@ -167,4 +166,3 @@
if (ApplicationManager.getApplication.isUnitTestMode) {
- processor.execute(
- if (oneSuperClass) classes(0)
+ processor.execute(if (oneSuperClass) classes(0)
else renameAllMarkerObject) //in unit tests uses base member or all base members
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/Cast.scala
) org.scalafmt.Error$: case BooleanType =>
- buildCast[Boolean](
- _,
- b => changePrecision(if (b) Decimal.ONE else Decimal.ZERO, target))
+ buildCast[Boolean](_, b =>
+ changePrecision(if (b) Decimal.ONE else Decimal.ZERO, target))
case DateType =>
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/hocon/parser/HoconPsiParser.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/hocon/parser/HoconPsiParser.scala
) org.scalafmt.Error$: case e: MalformedURLException =>
- tokenError(
- if (e.getMessage != null) e.getMessage
+ tokenError(if (e.getMessage != null) e.getMessage
else "malformed URL")
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/hocon/psi/HoconPsiElement.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/hocon/psi/HoconPsiElement.scala
) org.scalafmt.Error$: def enclosingObjectField: HObjectField =
- forParent(keyedParent => keyedParent.enclosingObjectField,
- objectField => objectField)
+ forParent(keyedParent => keyedParent.enclosingObjectField, objectField =>
+ objectField)
@@ -200,4 +200,4 @@
def fieldsInPathBackward: Stream[HKeyedField] =
- forParent(keyedField => this #:: keyedField.fieldsInPathBackward,
- of => Stream(this))
+ forParent(keyedField => this #:: keyedField.fieldsInPathBackward, of =>
+ Stream(this))
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala
) org.scalafmt.Error$: case struct: StructType =>
- create(InternalRow.fromSeq(
- struct.fields.map(f => default(f.dataType).value)),
- struct)
+ create(InternalRow.fromSeq(struct.fields.map(f =>
+ default(f.dataType).value)), struct)
case other =>
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/mathExpressions.scala
) org.scalafmt.Error$: override def genCode(ctx: CodegenContext, ev: ExprCode): String = {
- defineCodeGen(
- ctx,
- ev,
- (c) => s"UTF8String.fromString(java.lang.Long.toBinaryString($c))")
+ defineCodeGen(ctx, ev, (c) =>
+ s"UTF8String.fromString(java.lang.Long.toBinaryString($c))")
}
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/resolve/ReferenceExpressionResolver.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/resolve/ReferenceExpressionResolver.scala
) org.scalafmt.Error$: case call: ScMethodCall if !call.isUpdateCall =>
- ContextInfo(Some(call.argumentExpressions),
- () => call.expectedType(),
- isUnderscore = false)
+ ContextInfo(Some(call.argumentExpressions), () =>
+ call.expectedType(), isUnderscore = false)
case call: ScMethodCall =>
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/rdd/PairRDDFunctions.scala
) org.scalafmt.Error$: val cleanedSeqOp = self.context.clean(seqOp)
- combineByKeyWithClassTag[U]((v: V) => cleanedSeqOp(createZero(), v),
- cleanedSeqOp,
- combOp,
- partitioner)
+ combineByKeyWithClassTag[U]((v: V) =>
+ cleanedSeqOp(createZero(), v), cleanedSeqOp, combOp, partitioner)
}
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/hive/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala
) org.scalafmt.Error$: val rows = qe.toRdd.map(_.copy()).collect()
- val getBucketId = UnsafeProjection.create(
- HashPartitioning(qe.analyzed.output,
- numBuckets).partitionIdExpression :: Nil,
- qe.analyzed.output)
+ val getBucketId =
+ UnsafeProjection.create(HashPartitioning(
+ qe.analyzed.output,
+ numBuckets).partitionIdExpression :: Nil,
+ qe.analyzed.output)
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/resolve/ResolveUtils.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/resolve/ResolveUtils.scala
) org.scalafmt.Error$: case _ =>
- m.getParameterList.getParameters.map {
- param =>
- val scType = s.subst(param.exactParamType())
- new Parameter("",
- None,
- scType,
- scType,
- false,
- param.isVarArgs,
- false,
- param.index,
- Some(param))
+ m.getParameterList.getParameters.map { param =>
+ val scType = s.subst(param.exactParamType())
+ new Parameter("",
+ None,
+ scType,
+ scType,
+ false,
+ param.isVarArgs,
+ false,
+ param.index,
+ Some(param))
}
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/resolve/processor/ConstructorResolveProcessor.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/lang/resolve/processor/ConstructorResolveProcessor.scala
) org.scalafmt.Error$: constr =>
- new ScalaResolveResult(
- constr,
- subst,
- getImports(state),
- nameShadow0,
- parentElement = Some(clazz),
- boundClass = getBoundClass(state),
- fromType = fromType,
- isAccessible = isAccessible(constr,
- ref) && accessible)))
+ new ScalaResolveResult(constr,
+ subst,
+ getImports(state),
+ nameShadow0,
+ parentElement = Some(clazz),
+ boundClass = getBoundClass(state),
+ fromType = fromType,
+ isAccessible = isAccessible(
+ constr,
+ ref) && accessible)))
}
@@ -111,12 +111,13 @@
constr =>
- new ScalaResolveResult(
- constr,
- subst.followed(s),
- getImports(state),
- nameShadow0,
- parentElement = Some(ta),
- boundClass = getBoundClass(state),
- fromType = fromType,
- isAccessible = isAccessible(constr,
- ref) && accessible)))
+ new ScalaResolveResult(constr,
+ subst.followed(s),
+ getImports(state),
+ nameShadow0,
+ parentElement = Some(ta),
+ boundClass =
+ getBoundClass(state),
+ fromType = fromType,
+ isAccessible = isAccessible(
+ constr,
+ ref) && accessible)))
}
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/ScalaUDF.scala
) org.scalafmt.Error$: {
- func(converter0(child0.eval(input)), converter1(child1.eval(input)))
+ func(converter0(child0.eval(input)),
+ converter1(child1.eval(input)))
}
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/metrics/sink/MetricsServlet.scala
) org.scalafmt.Error$: Array[ServletContextHandler](
- createServletHandler(
- servletPath,
- new ServletParams(request => getMetricsSnapshot(request),
- "text/json"),
- securityMgr,
- conf)
+ createServletHandler(servletPath,
+ new ServletParams(request =>
+ getMetricsSnapshot(request), "text/json"),
+ securityMgr,
+ conf)
)
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/scheduler/SparkListener.scala
) org.scalafmt.Error$: taskInfoMetrics)
- showBytesDistribution("task result size:",
- (_, metric) => Some(metric.resultSize),
- taskInfoMetrics)
+ showBytesDistribution("task result size:", (_, metric) =>
+ Some(metric.resultSize), taskInfoMetrics)
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/main/scala/akka/http/impl/engine/client/OutgoingConnectionBlueprint.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/main/scala/akka/http/impl/engine/client/OutgoingConnectionBlueprint.scala
) org.scalafmt.Error$: val renderingContextCreation = b.add {
- Flow[HttpRequest] map {
- request ⇒
- val sendEntityTrigger =
- request.headers collectFirst {
- case headers.Expect.`100-continue` ⇒ Promise[NotUsed]().future
- }
- RequestRenderingContext(request, hostHeader, sendEntityTrigger)
+ Flow[HttpRequest] map { request ⇒
+ val sendEntityTrigger =
+ request.headers collectFirst {
+ case headers.Expect.`100-continue` ⇒ Promise[NotUsed]().future
+ }
+ RequestRenderingContext(request, hostHeader, sendEntityTrigger)
}
@@ -96,9 +95,8 @@
new HttpResponseParser(parserSettings,
- HttpHeaderParser(parserSettings) {
- info ⇒
- if (parserSettings.illegalHeaderWarnings)
- logParsingError(
- info withSummaryPrepended "Illegal response header",
- log,
- parserSettings.errorLoggingVerbosity)
+ HttpHeaderParser(parserSettings) { info ⇒
+ if (parserSettings.illegalHeaderWarnings)
+ logParsingError(
+ info withSummaryPrepended "Illegal response header",
+ log,
+ parserSettings.errorLoggingVerbosity)
})
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/test/org/jetbrains/plugins/scala/lang/overrideImplement/ScalaOIUtilTest.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/test/org/jetbrains/plugins/scala/lang/overrideImplement/ScalaOIUtilTest.scala
) org.scalafmt.Error$:
- private def unimplementedIn(
- @Language(value = "Scala",
- prefix = Prefix,
- suffix = Suffix) code: String) = {
+ private def unimplementedIn(@Language(value = "Scala",
+ prefix = Prefix,
+ suffix = Suffix) code: String) = {
val text: String = "" + code + Suffix
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpRequestParser.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/main/scala/akka/http/impl/engine/parsing/HttpRequestParser.scala
) org.scalafmt.Error$: if (rawRequestUriHeader)
- `Raw-Request-URI`(
- new String(uriBytes,
- HttpCharsets.`US-ASCII`.nioCharset)) :: headers
+ `Raw-Request-URI`(new String(
+ uriBytes,
+ HttpCharsets.`US-ASCII`.nioCharset)) :: headers
else headers
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerBackend.scala
) org.scalafmt.Error$: logInfo(
- "Executor lost: %s, marking slave %s as lost"
- .format(executorId.getValue, slaveId.getValue))
+ "Executor lost: %s, marking slave %s as lost".format(
+ executorId.getValue,
+ slaveId.getValue))
recordSlaveLost(d, slaveId, ExecutorExited(status, exitCausedByApp = true))
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/project/settings/ScalaCompilerSettings.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/project/settings/ScalaCompilerSettings.scala
) org.scalafmt.Error$: ("-language:postfixOps", () => postfixOps, postfixOps = _),
- ("-language:reflectiveCalls",
- () => reflectiveCalls,
- reflectiveCalls = _),
+ ("-language:reflectiveCalls", () =>
+ reflectiveCalls, reflectiveCalls = _),
("-language:implicitConversions",
@@ -63,5 +62,4 @@
("-explaintypes", () => explainTypeErrors, explainTypeErrors = _),
- ("-no-specialization",
- () => !specialization,
- (b: Boolean) => specialization = !b),
+ ("-no-specialization", () => !specialization, (b: Boolean) =>
+ specialization = !b),
("-P:continuations:enable", () => continuations, continuations = _))
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/main/scala/akka/http/impl/engine/server/HttpServerBluePrint.scala
) org.scalafmt.Error$:
- def streamRequestEntity(
- creator: (Source[ParserOutput.RequestOutput,
- NotUsed]) => RequestEntity): RequestEntity = {
+ def streamRequestEntity(creator: (Source[ParserOutput.RequestOutput,
+ NotUsed]) => RequestEntity)
+ : RequestEntity = {
// stream incoming chunks into the request entity until we reach the end of it
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/testingSupport/test/structureView/TestNodeProvider.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/testingSupport/test/structureView/TestNodeProvider.scala
) org.scalafmt.Error$: }).exists(
- refExpr =>
- checkRefExpr(refExpr,
- "pendingUntilFixed",
- List("java.lang.String")) || checkRefExpr(
- refExpr,
- "pendingUntilFixed"))
+ refExpr =>
+ checkRefExpr(refExpr,
+ "pendingUntilFixed",
+ List("java.lang.String")) || checkRefExpr(
+ refExpr,
+ "pendingUntilFixed"))
}
@@ -511,6 +511,4 @@
processChildren(getInnerInfixExprs(expr), extractFreeSpec, project)
- extractScalaTestScInfixExpr(
- expr,
- ExtractEntry("$minus", true, false, _ => children, List("void")),
- project).orElse(
+ extractScalaTestScInfixExpr(expr, ExtractEntry("$minus", true, false, _ =>
+ children, List("void")), project).orElse(
extractScalaTestScInfixExpr(
@@ -609,10 +607,7 @@
processChildren(getInnerMethodCalls(expr), extractFeatureSpec, project)
- extractScMethodCall(expr,
- ExtractEntry("feature",
- true,
- false,
- _ => children,
- List("java.lang.String"),
- List("void")),
- project).orElse(
+ extractScMethodCall(
+ expr,
+ ExtractEntry("feature", true, false, _ =>
+ children, List("java.lang.String"), List("void")),
+ project).orElse(
extractScMethodCall(
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
) org.scalafmt.Error$: responder: Responder[Seq[Node]]): ServletParams[Seq[Node]] =
- new ServletParams(responder,
- "text/html",
- (in: Seq[Node]) => "<!DOCTYPE html>" + in.toString)
+ new ServletParams(responder, "text/html", (in: Seq[Node]) =>
+ "<!DOCTYPE html>" + in.toString)
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/analysis/AnalysisSuite.scala
) org.scalafmt.Error$: "SPARK-8654: different types in inlist but can be converted to a common type") {
- val plan =
- Project(Alias(In(Literal(null), Seq(Literal(1), Literal(1.2345))),
- "a")() :: Nil,
- LocalRelation())
+ val plan = Project(Alias(In(Literal(null),
+ Seq(Literal(1), Literal(1.2345))),
+ "a")() :: Nil,
+ LocalRelation())
assertAnalysisSuccess(plan)
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/ws/MessageSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/ws/MessageSpec.scala
) org.scalafmt.Error$: val pingData1 = pingData.drop(3)
- pushInput(
- frameHeader(Opcode.Ping,
- 5,
- fin = true,
- mask = Some(pingMask)) ++ pingData0)
+ pushInput(frameHeader(Opcode.Ping,
+ 5,
+ fin = true,
+ mask = Some(pingMask)) ++ pingData0)
expectNoNetworkData()
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketClientSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/impl/engine/ws/WebSocketClientSpec.scala
) org.scalafmt.Error$:
- val graph = RunnableGraph.fromGraph(GraphDSL.create(clientLayer) {
- implicit b ⇒ client ⇒
- import GraphDSL.Implicits._
- Source.fromPublisher(netIn) ~> Flow[ByteString].map(
- SessionBytes(null, _)) ~> client.in2
- client.out1 ~> Flow[SslTlsOutbound].collect {
- case SendBytes(x) ⇒ x
- } ~> netOut.sink
- client.out2 ~> clientImplementation ~> client.in1
- ClosedShape
- })
+ val graph =
+ RunnableGraph.fromGraph(GraphDSL.create(clientLayer) {
+ implicit b ⇒ client ⇒
+ import GraphDSL.Implicits._
+ Source.fromPublisher(netIn) ~> Flow[ByteString].map(
+ SessionBytes(null, _)) ~> client.in2
+ client.out1 ~> Flow[SslTlsOutbound].collect {
+ case SendBytes(x) ⇒ x
+ } ~> netOut.sink
+ client.out2 ~> clientImplementation ~> client.in1
+ ClosedShape
+ })
ScalaFile(
project: JetBrains
github: https://github.com/JetBrains/intellij-scala/blob/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaEvaluatorBuilderUtil.scala
raw: https://raw.githubusercontent.com/JetBrains/intellij-scala/45c86930977278eb445b6aba79ac7011bc418490/src/org/jetbrains/plugins/scala/debugger/evaluation/ScalaEvaluatorBuilderUtil.scala
) org.scalafmt.Error$: def conditionalOr =
- binaryEval("||",
- (first, second) =>
- new ScalaIfEvaluator(first, trueEval, Some(second)))
+ binaryEval("||", (first, second) =>
+ new ScalaIfEvaluator(first, trueEval, Some(second)))
def conditionalAnd =
- binaryEval("&&",
- (first, second) =>
- new ScalaIfEvaluator(first, second, Some(falseEval)))
+ binaryEval("&&", (first, second) =>
+ new ScalaIfEvaluator(first, second, Some(falseEval)))
ScalaFile(
project: apache
github: https://github.com/apache/spark/blob/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/external/kafka/src/test/scala/org/apache/spark/streaming/kafka/DirectKafkaStreamSuite.scala
raw: https://raw.githubusercontent.com/apache/spark/2c5b18fb0fdeabd378dd97e91f72d1eac4e21cc7/external/kafka/src/test/scala/org/apache/spark/streaming/kafka/DirectKafkaStreamSuite.scala
) org.scalafmt.Error$: .getEarliestLeaderOffsets(topicPartitions)
- .fold(e => Map.empty[TopicAndPartition, Long],
- m => m.mapValues(lo => lo.offset))
+ .fold(e => Map.empty[TopicAndPartition, Long], m =>
+ m.mapValues(lo => lo.offset))
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpEntitySpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/scaladsl/model/HttpEntitySpec.scala
) org.scalafmt.Error$: "Chunked" in {
- withReturnType[Chunked](
- Chunked(tpe,
- source(Chunk(abc),
- Chunk(fgh),
- Chunk(ijk),
- LastChunk)).withoutSizeLimit)
+ withReturnType[Chunked](Chunked(tpe,
+ source(Chunk(abc),
+ Chunk(fgh),
+ Chunk(ijk),
+ LastChunk)).withoutSizeLimit)
withReturnType[RequestEntity](
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/scaladsl/util/FastFutureSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-http-core/src/test/scala/akka/http/scaladsl/util/FastFutureSpec.scala
) org.scalafmt.Error$: "Success -> Failure" in {
- test(Success(23),
- _.transformWith(_ ⇒ FastFuture.failed(TheException), neverCalled)) {
+ test(Success(23), _.transformWith(_ ⇒
+ FastFuture.failed(TheException), neverCalled)) {
_ shouldEqual Failure(TheException)
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-parsing/src/main/scala/akka/parboiled2/DynamicRuleDispatch.scala
) org.scalafmt.Error$: */
- def apply[P <: Parser, L <: HList](
- ruleNames: String*): (DynamicRuleDispatch[P,
- L],
- immutable.Seq[String]) = macro __create[P, L]
+ def apply[P <: Parser, L <: HList](ruleNames: String*): (DynamicRuleDispatch[
+ P,
+ L],
+ immutable.Seq[
+ String]) = macro __create[
+ P,
+ L]
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-remote/src/test/scala/akka/remote/RemoteDeathWatchSpec.scala
) org.scalafmt.Error$: "receive Terminated when watched node is unknown host" in {
- val path = RootActorPath(
- Address("akka.tcp",
- system.name,
- "unknownhost",
- 2552)) / "user" / "subject"
+ val path = RootActorPath(Address("akka.tcp",
+ system.name,
+ "unknownhost",
+ 2552)) / "user" / "subject"
system.actorOf(Props(new Actor {
@@ -96,7 +95,6 @@
"receive ActorIdentity(None) when identified node is unknown host" in {
- val path = RootActorPath(
- Address("akka.tcp",
- system.name,
- "unknownhost2",
- 2552)) / "user" / "subject"
+ val path = RootActorPath(Address("akka.tcp",
+ system.name,
+ "unknownhost2",
+ 2552)) / "user" / "subject"
system.actorSelection(path) ! Identify(path)
ScalaFile(
project: akka
github: https://github.com/akka/akka/blob/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala
raw: https://raw.githubusercontent.com/akka/akka/3698928fbdaa8fef8e2037fbc51887ab60addefb/akka-remote-tests/src/test/scala/akka/remote/testconductor/BarrierSpec.scala
) org.scalafmt.Error$: fail(
- "Expected " + Failed(
- barrier,
- DuplicateNode(Data(Set(nodeA), "", Nil, null),
- nodeB)) + " but got " + x)
+ "Expected " + Failed(barrier,
+ DuplicateNode(
+ Data(Set(nodeA), "", Nil, null),
+ nodeB)) + " but got " + x)
}
Success: 9273