@koen-dejonghe
Last active May 27, 2018 08:12
Using the JVM's garbage collector to control off-heap memory
package jtorch.cpu

import java.util.concurrent.atomic.AtomicLong

import com.typesafe.scalalogging.LazyLogging

import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
import scala.language.postfixOps

object NaiveMemoryManagement extends App with LazyLogging {

  logger.info("*** starting sequential **************************")
  sequential()
  logger.info("*** starting parallel ****************************")
  parallel()

  def sequential(): Unit = {
    val t3 = MyTensor.zeros(100, 100) // this one will only get garbage collected at the end of the program
    for (_ <- 1 to 100) {
      MyTensor.zeros(3000, 3000) // these will get GC'ed as soon as System.gc() is called
      Thread.sleep(1)
    }
    logger.info("DONE")
    logger.info(t3.cPtr.toString)
    logger.info(t3.payload.toString)
    logger.info(TH.THFloatTensor_desc(t3.payload).getStr) // this should still work
    logger.info(TH.THFloatTensor_get2d(t3.payload, 10, 10).toString)
  }

  def parallel(): Unit = {
    import scala.concurrent.ExecutionContext.Implicits.global

    val t3 = MyTensor.zeros(100, 100) // this one will only get garbage collected at the end of the program
    val futures = Future.sequence {
      (1 to 100).map { _ =>
        Future {
          MyTensor.zeros(3000, 3000) // these will get GC'ed as soon as System.gc() is called
          Thread.sleep(1)
        }
      }
    }
    Await.result(futures, 10 seconds)

    logger.info("DONE")
    logger.info(t3.cPtr.toString)
    logger.info(t3.payload.toString)
    logger.info(TH.THFloatTensor_desc(t3.payload).getStr) // this should still work
    logger.info(TH.THFloatTensor_get2d(t3.payload, 10, 10).toString)
  }
}

case class MyTensor private (payload: SWIGTYPE_p_THFloatTensor,
                             cPtr: Long,
                             size: Long)
    extends LazyLogging {

  // Runs on the JVM's finalizer thread once the tensor becomes unreachable:
  // free the native Torch tensor and lower the off-heap water mark.
  override def finalize(): Unit = {
    THJNI.THFloatTensor_free(cPtr)
    val memSize = MyTensor.memoryWaterMark.addAndGet(-size)
    logger.info(s"freeing $cPtr (mem = $memSize)")
  }
}

object MyTensor extends LazyLogging {

  val threshold: Long = 2L * 1024L * 1024L * 1024L // 2 GB
  val memoryWaterMark = new AtomicLong(0)

  // Request a GC cycle once the estimated off-heap usage exceeds the threshold,
  // so that finalizers of unreachable tensors release their native memory.
  def memCheck(size: Long): Unit =
    if (memoryWaterMark.addAndGet(size) > threshold) {
      System.gc()
    }

  def zeros(d1: Long, d2: Long): MyTensor = {
    val tensor = makeTensorOfZeros(d1, d2)
    logger.info(s"creating ${tensor.cPtr}")
    memCheck(tensor.size)
    tensor
  }

  // boilerplate to create a Torch tensor of floats
  def makeTensorOfZeros(d1: Long, d2: Long): MyTensor = {
    val size: SWIGTYPE_p_THLongStorage = TH.THLongStorage_newWithSize2(d1, d2)
    val cPtr = THJNI.THFloatTensor_newWithSize2d(d1, d2)
    val t = new SWIGTYPE_p_THFloatTensor(cPtr, false)
    TH.THFloatTensor_zeros(t, size)
    MyTensor(t, cPtr, d1 * d2 * 4) // float = 4 bytes
  }
}
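Since finalize() is deprecated from Java 9 onwards, the same pattern can also be expressed with java.lang.ref.Cleaner. The following is a minimal sketch of that variant, not part of the gist: it assumes Scala 2.12+ (for the SAM conversion to Runnable), and nativeFree is a hypothetical stand-in for THJNI.THFloatTensor_free; the water-mark/System.gc() trigger is kept the same as in memCheck above.

import java.lang.ref.Cleaner
import java.util.concurrent.atomic.AtomicLong

object CleanerBasedTensor {

  private val cleaner = Cleaner.create()
  private val memoryWaterMark = new AtomicLong(0)
  private val threshold: Long = 2L * 1024L * 1024L * 1024L // 2 GB

  // Hypothetical stand-in for THJNI.THFloatTensor_free.
  private def nativeFree(cPtr: Long): Unit = ()

  // The cleanup action only captures the raw pointer and the size,
  // never the Tensor itself, otherwise it would never become unreachable.
  private def freeAction(cPtr: Long, size: Long): Runnable = () => {
    nativeFree(cPtr)
    memoryWaterMark.addAndGet(-size)
  }

  final class Tensor(val cPtr: Long, val size: Long) {
    // Register the cleanup action; the Cleaner thread runs it after GC
    // has determined this object is unreachable.
    cleaner.register(this, freeAction(cPtr, size))
  }

  def allocate(cPtr: Long, size: Long): Tensor = {
    // Same trigger as memCheck above: ask for a GC cycle past the threshold.
    if (memoryWaterMark.addAndGet(size) > threshold) System.gc()
    new Tensor(cPtr, size)
  }
}

Unlike a finalizer, the cleanup action is registered explicitly at construction time and cannot resurrect the object; the trade-off is the same, though: native memory is only released when the garbage collector actually runs.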
koen-dejonghe commented May 26, 2018
Produces:

10:09:43.259 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - *** starting sequential **************************
10:09:43.378 [main] INFO jtorch.cpu.MyTensor$ - creating 140312567714048
10:09:43.521 [main] INFO jtorch.cpu.MyTensor$ - creating 140312597006160
10:09:43.542 [main] INFO jtorch.cpu.MyTensor$ - creating 140312598094832
10:09:43.562 [main] INFO jtorch.cpu.MyTensor$ - creating 140312597006352
10:09:43.581 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570028640
10:09:43.600 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570030240
10:09:43.619 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569321376
10:09:43.639 [main] INFO jtorch.cpu.MyTensor$ - creating 140312598095024
10:09:43.657 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569320816
10:09:43.677 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568055760
10:09:43.696 [main] INFO jtorch.cpu.MyTensor$ - creating 140312599094448
10:09:43.714 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570030432
10:09:43.733 [main] INFO jtorch.cpu.MyTensor$ - creating 140312598095216
10:09:43.751 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600516688
10:09:43.771 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570030560
10:09:43.789 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569323504
10:09:43.807 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600517904
10:09:43.827 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570797328
10:09:43.845 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569323648
10:09:43.863 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570030752
10:09:43.880 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568055904
10:09:43.898 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569323792
10:09:43.916 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569323952
10:09:43.934 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569324144
10:09:43.953 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569324336
10:09:43.970 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569324528
10:09:43.990 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569324720
10:09:44.010 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569324912
10:09:44.028 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569325104
10:09:44.046 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568056096
10:09:44.064 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568056288
10:09:44.082 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568056480
10:09:44.099 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568056672
10:09:44.116 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569325296
10:09:44.134 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570030944
10:09:44.152 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570031136
10:09:44.169 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570031328
10:09:44.187 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570031520
10:09:44.205 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569325488
10:09:44.222 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569325680
10:09:44.240 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569325872
10:09:44.258 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569326064
10:09:44.276 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570031712
10:09:44.294 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570031904
10:09:44.312 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570032096
10:09:44.331 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569326256
10:09:44.348 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600518496
10:09:44.366 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600518688
10:09:44.384 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600518880
10:09:44.402 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570032288
10:09:44.421 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600519008
10:09:44.438 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600519200
10:09:44.456 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600519392
10:09:44.474 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569326448
10:09:44.491 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600519584
10:09:44.510 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569326896
10:09:44.529 [main] INFO jtorch.cpu.MyTensor$ - creating 140312599096000
10:09:44.546 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570032688
10:09:44.565 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570032880
10:09:44.583 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570033072
10:09:44.601 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570033264
10:09:44.620 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312598094832 (mem = 2124040000)
10:09:44.623 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570033072 (mem = 2088040000)
10:09:44.625 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570032880 (mem = 2052040000)
10:09:44.629 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570032688 (mem = 2016040000)
10:09:44.632 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312599096000 (mem = 1980040000)
10:09:44.634 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569326896 (mem = 1944040000)
10:09:44.636 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600519584 (mem = 1908040000)
10:09:44.637 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600271264
10:09:44.639 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569326448 (mem = 1908040000)
10:09:44.643 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600519392 (mem = 1872040000)
10:09:44.646 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570032736
10:09:44.646 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600519200 (mem = 1872040000)
10:09:44.649 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600519008 (mem = 1836040000)
10:09:44.651 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570032288 (mem = 1800040000)
10:09:44.654 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600518880 (mem = 1764040000)
10:09:44.655 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568924912
10:09:44.656 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600518688 (mem = 1764040000)
10:09:44.660 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600518496 (mem = 1728040000)
10:09:44.663 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569326256 (mem = 1692040000)
10:09:44.663 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600178816
10:09:44.666 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570032096 (mem = 1692040000)
10:09:44.668 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570031904 (mem = 1656040000)
10:09:44.671 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570031712 (mem = 1620040000)
10:09:44.671 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600519232
10:09:44.673 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569326064 (mem = 1620040000)
10:09:44.676 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569325872 (mem = 1584040000)
10:09:44.680 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569325680 (mem = 1548040000)
10:09:44.681 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600519584
10:09:44.684 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569325488 (mem = 1548040000)
10:09:44.687 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570031520 (mem = 1512040000)
10:09:44.690 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569325680
10:09:44.691 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570031328 (mem = 1512040000)
10:09:44.694 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570031136 (mem = 1476040000)
10:09:44.698 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570030944 (mem = 1440040000)
10:09:44.699 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570031328
10:09:44.702 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569325296 (mem = 1440040000)
10:09:44.705 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568056672 (mem = 1404040000)
10:09:44.708 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570030992
10:09:44.711 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568056480 (mem = 1404040000)
10:09:44.714 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568056288 (mem = 1368040000)
10:09:44.716 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570031520
10:09:44.718 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568056096 (mem = 1368040000)
10:09:44.721 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569325104 (mem = 1332040000)
10:09:44.725 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569324912 (mem = 1296040000)
10:09:44.726 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570031904
10:09:44.729 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569324720 (mem = 1296040000)
10:09:44.732 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569324528 (mem = 1260040000)
10:09:44.736 [main] INFO jtorch.cpu.MyTensor$ - creating 140312567876352
10:09:44.736 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569324336 (mem = 1260040000)
10:09:44.739 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569324144 (mem = 1224040000)
10:09:44.743 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569323952 (mem = 1188040000)
10:09:44.744 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600215744
10:09:44.745 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569323792 (mem = 1188040000)
10:09:44.750 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568055904 (mem = 1152040000)
10:09:44.753 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600239328
10:09:44.755 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570030752 (mem = 1152040000)
10:09:44.758 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569323648 (mem = 1116040000)
10:09:44.761 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600205696
10:09:44.762 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570797328 (mem = 1116040000)
10:09:44.765 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600517904 (mem = 1080040000)
10:09:44.770 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569323696
10:09:44.771 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569323504 (mem = 1080040000)
10:09:44.773 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570030560 (mem = 1044040000)
10:09:44.777 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600516688 (mem = 1008040000)
10:09:44.779 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570797376
10:09:44.782 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312598095216 (mem = 1008040000)
10:09:44.784 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570030432 (mem = 972040000)
10:09:44.788 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570032160
10:09:44.788 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312599094448 (mem = 972040000)
10:09:44.791 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568055760 (mem = 936040000)
10:09:44.795 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569320816 (mem = 900040000)
10:09:44.798 [main] INFO jtorch.cpu.MyTensor$ - creating 140312567928528
10:09:44.800 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312598095024 (mem = 900040000)
10:09:44.803 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569321376 (mem = 864040000)
10:09:44.806 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569323408
10:09:44.807 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570030240 (mem = 864040000)
10:09:44.810 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570028640 (mem = 828040000)
10:09:44.815 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312597006352 (mem = 792040000)
10:09:44.816 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600276704
10:09:44.817 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312597006160 (mem = 792040000)
10:09:44.824 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600273680
10:09:44.831 [main] INFO jtorch.cpu.MyTensor$ - creating 140312600284592
10:09:44.842 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569324560
10:09:44.852 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569324768
10:09:44.861 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569325104
10:09:44.870 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568786720
10:09:44.879 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568812160
10:09:44.888 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568803280
10:09:44.897 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568828576
10:09:44.907 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568861136
10:09:44.917 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568884000
10:09:44.925 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568895328
10:09:44.933 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568907584
10:09:44.943 [main] INFO jtorch.cpu.MyTensor$ - creating 140312598095088
10:09:44.952 [main] INFO jtorch.cpu.MyTensor$ - creating 140312597006224
10:09:44.961 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568055808
10:09:44.970 [main] INFO jtorch.cpu.MyTensor$ - creating 140312568056480
10:09:44.988 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570030144
10:09:45.007 [main] INFO jtorch.cpu.MyTensor$ - creating 140312570030432
10:09:45.008 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - DONE
10:09:45.008 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - 140312567714048
10:09:45.009 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - jtorch.cpu.SWIGTYPE_p_THFloatTensor@31dc339b
10:09:45.009 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - torch.xTensor of size 100x100
10:09:45.011 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - 0.0
10:09:45.011 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - *** starting parallel ****************************
10:09:45.011 [main] INFO jtorch.cpu.MyTensor$ - creating 140312569312608
10:09:45.129 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312597016736
10:09:45.132 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312599096368
10:09:45.133 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312568136432
10:09:45.133 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312570781920
10:09:45.134 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312569359488
10:09:45.134 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312599096912
10:09:45.134 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312570064560
10:09:45.141 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312598105280
10:09:45.166 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312570064784
10:09:45.167 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312598114992
10:09:45.169 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312569360576
10:09:45.170 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312597028080
10:09:45.170 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312569360416
10:09:45.173 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312600551472
10:09:45.177 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312597032208
10:09:45.179 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312570065888
10:09:45.203 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312569372928
10:09:45.204 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312600557264
10:09:45.205 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312569373120
10:09:45.229 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312570066080
10:09:45.205 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312597033408
10:09:45.229 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312599099936
10:09:45.229 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312570821760
10:09:45.229 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312600564944
10:09:45.229 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312599100128
10:09:45.270 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570064784 (mem = 2304080000)
10:09:45.273 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570821760 (mem = 2304080000)
10:09:45.276 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312570809600
10:09:45.287 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600564944 (mem = 2304080000)
10:09:45.289 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312597033408 (mem = 2268080000)
10:09:45.295 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312570042528
10:09:45.305 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312599100128 (mem = 2268080000)
10:09:45.310 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570809600 (mem = 2232080000)
10:09:45.312 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312569374992
10:09:45.305 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312569375184
10:09:45.322 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312600518544
10:09:45.322 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312599099936 (mem = 2232080000)
10:09:45.322 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312568154448
10:09:45.328 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312569329632
10:09:45.322 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312597034720
10:09:45.338 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569374992 (mem = 2268080000)
10:09:45.322 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312599100704
10:09:45.322 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312568081424
10:09:45.367 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312568906448
10:09:45.357 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312600564992
10:09:45.375 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312570808848
10:09:45.375 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569329632 (mem = 2412080000)
10:09:45.383 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312598107376
10:09:45.396 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568906448 (mem = 2484080000)
10:09:45.409 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600564992 (mem = 2484080000)
10:09:45.413 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570808848 (mem = 2448080000)
10:09:45.415 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312600566576
10:09:45.427 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568081424 (mem = 2448080000)
10:09:45.415 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312570064832
10:09:45.438 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312568156048
10:09:45.427 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312600566976
10:09:45.438 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312598107376 (mem = 2448080000)
10:09:45.460 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312597033408
10:09:45.460 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312568155856
10:09:45.460 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312570821088
10:09:45.460 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312600568592
10:09:45.461 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570064832 (mem = 2520080000)
10:09:45.480 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312599099984
10:09:45.490 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312597033408 (mem = 2592080000)
10:09:45.461 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312570069008
10:09:45.490 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312570068880
10:09:45.506 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600568592 (mem = 2628080000)
10:09:45.514 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312569329632
10:09:45.514 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312568081472
10:09:45.525 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570069008 (mem = 2664080000)
10:09:45.539 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312570069264
10:09:45.539 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312570818144
10:09:45.539 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312570069456
10:09:45.546 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312568156432
10:09:45.539 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570068880 (mem = 2664080000)
10:09:45.546 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312570821920
10:09:45.571 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570069264 (mem = 2772080000)
10:09:45.571 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312597033456
10:09:45.571 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312568156832
10:09:45.590 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568156432 (mem = 2808080000)
10:09:45.590 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312599101616
10:09:45.590 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312599101456
10:09:45.602 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312568157024
10:09:45.602 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312597034864
10:09:45.608 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570821920 (mem = 2844080000)
10:09:45.631 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312569377264
10:09:45.631 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312597035056
10:09:45.631 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312599101808
10:09:45.651 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312570822592
10:09:45.641 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568157024 (mem = 2952080000)
10:09:45.672 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312570822752
10:09:45.672 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312570821984
10:09:45.672 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312569377264 (mem = 3024080000)
10:09:45.679 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312570822944
10:09:45.672 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312568156496
10:09:45.686 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312570068896
10:09:45.679 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312569377456
10:09:45.714 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312569377328
10:09:45.696 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570822752 (mem = 3096080000)
10:09:45.730 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312599102192
10:09:45.730 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312598106240
10:09:45.724 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312599102000
10:09:45.748 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570068896 (mem = 3276080000)
10:09:45.741 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312597035248
10:09:45.758 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312599102384
10:09:45.758 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312600568640
10:09:45.770 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312598106240 (mem = 3384080000)
10:09:45.785 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312597035440
10:09:45.785 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312599102576
10:09:45.795 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312569378064
10:09:45.805 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312598106288
10:09:45.814 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312597035728
10:09:45.785 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312598112896
10:09:45.795 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312597035248 (mem = 3384080000)
10:09:45.822 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312568157088
10:09:45.844 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312597035616
10:09:45.844 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312599102576 (mem = 3564080000)
10:09:45.844 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312569378256
10:09:45.865 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312600569424
10:09:45.844 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312599102768
10:09:45.865 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312598106288 (mem = 3600080000)
10:09:45.883 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312568157344
10:09:45.883 [scala-execution-context-global-15] INFO jtorch.cpu.MyTensor$ - creating 140312600570096
10:09:45.894 [scala-execution-context-global-17] INFO jtorch.cpu.MyTensor$ - creating 140312598116416
10:09:45.894 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600569424 (mem = 3708080000)
10:09:45.894 [scala-execution-context-global-13] INFO jtorch.cpu.MyTensor$ - creating 140312569378448
10:09:45.894 [scala-execution-context-global-16] INFO jtorch.cpu.MyTensor$ - creating 140312599102640
10:09:45.921 [scala-execution-context-global-18] INFO jtorch.cpu.MyTensor$ - creating 140312570823392
10:09:45.928 [scala-execution-context-global-11] INFO jtorch.cpu.MyTensor$ - creating 140312600570272
10:09:45.913 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312568157344 (mem = 3744080000)
10:09:45.913 [scala-execution-context-global-12] INFO jtorch.cpu.MyTensor$ - creating 140312570823552
10:09:45.935 [scala-execution-context-global-14] INFO jtorch.cpu.MyTensor$ - creating 140312600570384
10:09:45.957 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570823392 (mem = 3888080000)
10:09:45.959 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312600570272 (mem = 3852080000)
10:09:45.961 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - DONE
10:09:45.961 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - 140312569312608
10:09:45.961 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - jtorch.cpu.SWIGTYPE_p_THFloatTensor@2de8284b
10:09:45.961 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - torch.xTensor of size 100x100
10:09:45.961 [main] INFO jtorch.cpu.NaiveMemoryManagement$ - 0.0
10:09:45.963 [Finalizer] INFO jtorch.cpu.MyTensor - freeing 140312570823552 (mem = 3816080000)

Process finished with exit code 0
