@nickgnd · Created July 18, 2023 14:15
Reply to Kenichi (Programming ML - chapter 3 with nx)

Chapter 3: Walking the Gradient

Mix.install([
  {:vega_lite, "~> 0.1.6"},
  {:kino, "~> 0.8.1"},
  {:kino_vega_lite, "~> 0.1.7"},
  {:explorer, "~> 0.5.6"},
  {:kino_explorer, "~> 0.1.4"},
  {:nx, "~> 0.5"}
])
Resolving Hex dependencies...
Dependency resolution completed:
New:
  castore 1.0.3
  complex 0.5.0
  explorer 0.5.7
  kino 0.8.1
  kino_explorer 0.1.5
  kino_vega_lite 0.1.9
  nx 0.5.3
  rustler_precompiled 0.6.2
  table 0.1.2
  table_rex 3.1.1
  telemetry 1.2.1
  vega_lite 0.1.7
* Getting vega_lite (Hex package)
* Getting kino (Hex package)
* Getting kino_vega_lite (Hex package)
* Getting explorer (Hex package)
* Getting kino_explorer (Hex package)
* Getting nx (Hex package)
* Getting complex (Hex package)
* Getting telemetry (Hex package)
* Getting rustler_precompiled (Hex package)
* Getting table (Hex package)
* Getting table_rex (Hex package)
* Getting castore (Hex package)
==> table
Compiling 5 files (.ex)
Generated table app
==> vega_lite
Compiling 5 files (.ex)
Generated vega_lite app
===> Analyzing applications...
===> Compiling telemetry
==> complex
Compiling 2 files (.ex)
Generated complex app
==> nx
Compiling 31 files (.ex)
Generated nx app
==> kino
Compiling 37 files (.ex)
Generated kino app
==> kino_vega_lite
Compiling 4 files (.ex)
Generated kino_vega_lite app
==> table_rex
Compiling 7 files (.ex)
Generated table_rex app
==> castore
Compiling 1 file (.ex)
Generated castore app
==> rustler_precompiled
Compiling 4 files (.ex)
Generated rustler_precompiled app
==> explorer
Compiling 19 files (.ex)

16:05:47.989 [debug] Copying NIF from cache and extracting to /Users/nicolo.gnudi/code/personal/Library/Caches/mix/installs/elixir-1.14.2-erts-13.0.4/cef305cfa5849d5cca8a9505684ac16a/_build/dev/lib/explorer/priv/native/libexplorer-v0.5.7-nif-2.16-x86_64-apple-darwin.so
Generated explorer app
==> kino_explorer
Compiling 4 files (.ex)
Generated kino_explorer app
:ok

Read the data

data =
  __DIR__
  |> Path.join("pizza.txt")
  |> Path.expand()
  |> File.read!()
  # convert any two or more spaces into a comma
  |> String.replace(~r/[[:blank:]]{2,}/, ",")
  |> Explorer.DataFrame.load_csv!()
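
To see what the whitespace normalization does, here is a quick check on the header line (an illustrative input; the column names come from the dataframe used below):

String.replace("Reservations   Pizzas", ~r/[[:blank:]]{2,}/, ",")
# => "Reservations,Pizzas"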

Linear regression with bias

☝️ From chapter 2

defmodule C2.LinearRegressionWithBias do
  @doc """
  Returns a list of predictions.
  """
  def predict([item | rest], weight, bias) do
    [predict(item, weight, bias) | predict(rest, weight, bias)]
  end

  def predict([], _weight, _bias), do: []

  # The function predicts the pizzas from the reservations.
  # To be more precise, it takes the input variable, the weight
  # and the bias, and it uses them to calculate ŷ.
  def predict(x, weight, bias), do: x * weight + bias

  @doc """
  Returns the mean squared error.
  """
  def loss(x, y, weight, bias) when is_list(x) and is_list(y) do
    predictions = predict(x, weight, bias)
    errors = Enum.zip_with([predictions, y], fn [pr, y] -> pr - y end)
    squared_error = square(errors)
    avg(squared_error)
  end

  # Chapter 2's naive training: at each iteration, nudge either the weight
  # or the bias by `lr` in whichever direction lowers the loss.
  def train(x, y, iterations, lr) when is_list(x) and is_list(y) do
    Enum.reduce(0..(iterations - 1), %{weight: 0, bias: 0}, fn i, %{weight: w, bias: b} = acc ->
      current_loss = loss(x, y, w, b)

      IO.puts("Iteration #{i} => Loss: #{current_loss}")

      cond do
        loss(x, y, w + lr, b) < current_loss -> %{acc | weight: w + lr}
        loss(x, y, w - lr, b) < current_loss -> %{acc | weight: w - lr}
        loss(x, y, w, b + lr) < current_loss -> %{acc | bias: b + lr}
        loss(x, y, w, b - lr) < current_loss -> %{acc | bias: b - lr}
        true -> acc
      end
    end)
  end

  defp square(list) when is_list(list) do
    for i <- list, do: i * i
  end

  defp avg(list) when is_list(list) do
    Enum.sum(list) / length(list)
  end
end
{:module, C2.LinearRegressionWithBias, <<70, 79, 82, 49, 0, 0, 18, ...>>, {:avg, 1}}
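
As a quick sanity check of predict/3 and loss/4 (made-up numbers, not the pizza data):

C2.LinearRegressionWithBias.predict([1, 2, 3], 2, 1)
# => [3, 5, 7]

C2.LinearRegressionWithBias.loss([1, 2, 3], [3, 5, 8], 2, 1)
# => ((3 - 3)² + (5 - 5)² + (7 - 8)²) / 3 ≈ 0.3333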

Plot the loss curve

# Extract "Reservations" and "Pizzas" from the dataframe
x = Explorer.Series.to_list(data["Reservations"])
y = Explorer.Series.to_list(data["Pizzas"])
[33, 16, 32, 51, 27, 16, 34, 17, 29, 15, 15, 32, 22, 37, 13, 44, 16, 21, 37, 30, 26, 34, 23, 39, 27,
 37, 17, 18, 25, 23]
alias VegaLite, as: Vl

# Generate a sequence that will be used as `weight`
# From -1 to 4, step 0.01
weights = Enum.map(-100..400, &(&1 / 100))

# Compute the loss for each weight, with bias=0
losses = Enum.map(weights, &C2.LinearRegressionWithBias.loss(x, y, &1, 0))

# Get the index of the minimum loss
# (computing the minimum once instead of inside the predicate)
min_loss = Enum.min(losses)
min_loss_index = Enum.find_index(losses, &(&1 == min_loss))

Vl.new(width: 600, height: 400)
|> Vl.layers([
  Vl.new()
  |> Vl.data_from_values(weight: weights, loss: losses)
  |> Vl.mark(:line)
  |> Vl.encode_field(:x, "weight", type: :quantitative)
  |> Vl.encode_field(:y, "loss", type: :quantitative),
  Vl.new()
  |> Vl.data_from_values(
    weight: [Enum.at(weights, min_loss_index)],
    min_loss: [Enum.at(losses, min_loss_index)]
  )
  |> Vl.mark(:circle, tooltip: true, size: "100", color: "red")
  |> Vl.encode_field(:x, "weight", type: :quantitative)
  |> Vl.encode_field(:y, "min_loss", type: :quantitative, title: "loss")
])
{"$schema":"https://vega.github.io/schema/vega-lite/v5.json","height":400,"layer":[{"data":{"values":[{"loss":1838.4666666666667,"weight":-1.0},{"loss":1826.0445466666667,"weight":-0.99},{"loss":1813.6661866666666,"weight":-0.98},{"loss":1801.3315866666667,"weight":-0.97},{"loss":1789.0407466666666,"weight":-0.96},{"loss":1776.7936666666667,"weight":-0.95},{"loss":1764.5903466666668,"weight":-0.94},{"loss":1752.4307866666672,"weight":-0.93},{"loss":1740.3149866666668,"weight":-0.92},{"loss":1728.2429466666667,"weight":-0.91},{"loss":1716.2146666666667,"weight":-0.9},{"loss":1704.2301466666663,"weight":-0.89},{"loss":1692.2893866666666,"weight":-0.88},{"loss":1680.3923866666662,"weight":-0.87},{"loss":1668.5391466666672,"weight":-0.86},{"loss":1656.7296666666664,"weight":-0.85},{"loss":1644.9639466666674,"weight":-0.84},{"loss":1633.2419866666667,"weight":-0.83},{"loss":1621.5637866666668,"weight":-0.82},{"loss":1609.9293466666668,"weight":-0.81},{"loss":1598.3386666666663,"weight":-0.8},{"loss":1586.7917466666668,"weight":-0.79},{"loss":1575.2885866666663,"weight":-0.78},{"loss":1563.8291866666668,"weight":-0.77},{"loss":1552.413546666667,"weight":-0.76},{"loss":1541.0416666666667,"weight":-0.75},{"loss":1529.7135466666666,"weight":-0.74},{"loss":1518.429186666667,"weight":-0.73},{"loss":1507.1885866666667,"weight":-0.72},{"loss":1495.9917466666664,"weight":-0.71},{"loss":1484.8386666666665,"weight":-0.7},{"loss":1473.7293466666667,"weight":-0.69},{"loss":1462.6637866666672,"weight":-0.68},{"loss":1451.6419866666665,"weight":-0.67},{"loss":1440.6639466666666,"weight":-0.66},{"loss":1429.7296666666664,"weight":-0.65},{"loss":1418.8391466666662,"weight":-0.64},{"loss":1407.9923866666663,"weight":-0.63},{"loss":1397.1893866666665,"weight":-0.62},{"loss":1386.4301466666664,"weight":-0.61},{"loss":1375.7146666666665,"weight":-0.6},{"loss":1365.0429466666667,"weight":-0.59},{"loss":1354.4149866666664,"weight":-0.58},{"loss":1343.8307866666667,"weight":-0.57},{"loss":1333.2903466666671,"weight":-0.56},{"loss":1322.7936666666667,"weight":-0.55},{"loss":1312.3407466666665,"weight":-0.54},{"loss":1301.9315866666666,"weight":-0.53},{"loss":1291.5661866666665,"weight":-0.52},{"loss":1281.2445466666663,"weight":-0.51},{"loss":1270.9666666666667,"weight":-0.5},{"loss":1260.7325466666666,"weight":-0.49},{"loss":1250.5421866666666,"weight":-0.48},{"loss":1240.3955866666665,"weight":-0.47},{"loss":1230.2927466666667,"weight":-0.46},{"loss":1220.2336666666667,"weight":-0.45},{"loss":1210.2183466666668,"weight":-0.44},{"loss":1200.2467866666666,"weight":-0.43},{"loss":1190.3189866666664,"weight":-0.42},{"loss":1180.4349466666665,"weight":-0.41},{"loss":1170.5946666666669,"weight":-0.4},{"loss":1160.7981466666665,"weight":-0.39},{"loss":1151.045386666667,"weight":-0.38},{"loss":1141.3363866666666,"weight":-0.37},{"loss":1131.6711466666663,"weight":-0.36},{"loss":1122.0496666666668,"weight":-0.35},{"loss":1112.4719466666668,"weight":-0.34},{"loss":1102.9379866666668,"weight":-0.33},{"loss":1093.4477866666668,"weight":-0.32},{"loss":1084.0013466666667,"weight":-0.31},{"loss":1074.5986666666665,"weight":-0.3},{"loss":1065.2397466666666,"weight":-0.29},{"loss":1055.924586666667,"weight":-0.28},{"loss":1046.6531866666667,"weight":-0.27},{"loss":1037.4255466666666,"weight":-0.26},{"loss":1028.2416666666666,"weight":-0.25},{"loss":1019.1015466666668,"weight":-0.24},{"loss":1010.0051866666666,"weight":-0.23},{"loss":1000.9525866666668,"weight":-0.22},{"loss":991.9437466666665,"weight":-0.21},{"loss":982.9786666666664,
"weight":-0.2},{"loss":974.0573466666667,"weight":-0.19},{"loss":965.1797866666668,"weight":-0.18},{"loss":956.3459866666665,"weight":-0.17},{"loss":947.5559466666666,"weight":-0.16},{"loss":938.8096666666664,"weight":-0.15},{"loss":930.1071466666666,"weight":-0.14},{"loss":921.4483866666669,"weight":-0.13},{"loss":912.8333866666667,"weight":-0.12},{"loss":904.2621466666667,"weight":-0.11},{"loss":895.7346666666667,"weight":-0.1},{"loss":887.2509466666667,"weight":-0.09},{"loss":878.8109866666668,"weight":-0.08},{"loss":870.4147866666667,"weight":-0.07},{"loss":862.0623466666664,"weight":-0.06},{"loss":853.7536666666666,"weight":-0.05},{"loss":845.4887466666667,"weight":-0.04},{"loss":837.2675866666668,"weight":-0.03},{"loss":829.0901866666667,"weight":-0.02},{"loss":820.9565466666664,"weight":-0.01},{"loss":812.8666666666667,"weight":0.0},{"loss":804.8205466666666,"weight":0.01},{"loss":796.8181866666667,"weight":0.02},{"loss":788.8595866666667,"weight":0.03},{"loss":780.9447466666669,"weight":0.04},{"loss":773.0736666666667,"weight":0.05},{"loss":765.2463466666667,"weight":0.06},{"loss":757.4627866666665,"weight":0.07},{"loss":749.7229866666669,"weight":0.08},{"loss":742.0269466666667,"weight":0.09},{"loss":734.3746666666666,"weight":0.1},{"loss":726.7661466666667,"weight":0.11},{"loss":719.2013866666668,"weight":0.12},{"loss":711.6803866666665,"weight":0.13},{"loss":704.2031466666667,"weight":0.14},{"loss":696.7696666666668,"weight":0.15},{"loss":689.3799466666667,"weight":0.16},{"loss":682.0339866666668,"weight":0.17},{"loss":674.7317866666667,"weight":0.18},{"loss":667.4733466666665,"weight":0.19},{"loss":660.2586666666666,"weight":0.2},{"loss":653.0877466666666,"weight":0.21},{"loss":645.9605866666666,"weight":0.22},{"loss":638.8771866666665,"weight":0.23},{"loss":631.8375466666669,"weight":0.24},{"loss":624.8416666666667,"weight":0.25},{"loss":617.8895466666668,"weight":0.26},{"loss":610.9811866666668,"weight":0.27},{"loss":604.1165866666666,"weight":0.28},{"loss":597.2957466666668,"weight":0.29},{"loss":590.5186666666666,"weight":0.3},{"loss":583.7853466666667,"weight":0.31},{"loss":577.0957866666666,"weight":0.32},{"loss":570.4499866666667,"weight":0.33},{"loss":563.8479466666666,"weight":0.34},{"loss":557.2896666666664,"weight":0.35},{"loss":550.7751466666667,"weight":0.36},{"loss":544.3043866666667,"weight":0.37},{"loss":537.8773866666666,"weight":0.38},{"loss":531.4941466666668,"weight":0.39},{"loss":525.1546666666667,"weight":0.4},{"loss":518.8589466666667,"weight":0.41},{"loss":512.6069866666666,"weight":0.42},{"loss":506.3987866666667,"weight":0.43},{"loss":500.23434666666674,"weight":0.44},{"loss":494.1136666666666,"weight":0.45},{"loss":488.03674666666666,"weight":0.46},{"loss":482.0035866666667,"weight":0.47},{"loss":476.0141866666666,"weight":0.48},{"loss":470.06854666666663,"weight":0.49},{"loss":464.1666666666667,"weight":0.5},{"loss":458.3085466666666,"weight":0.51},{"loss":452.4941866666667,"weight":0.52},{"loss":446.72358666666673,"weight":0.53},{"loss":440.99674666666664,"weight":0.54},{"loss":435.3136666666666,"weight":0.55},{"loss":429.67434666666657,"weight":0.56},{"loss":424.07878666666676,"weight":0.57},{"loss":418.52698666666663,"weight":0.58},{"loss":413.0189466666667,"weight":0.59},{"loss":407.5546666666668,"weight":0.6},{"loss":402.13414666666665,"weight":0.61},{"loss":396.7573866666666,"weight":0.62},{"loss":391.42438666666663,"weight":0.63},{"loss":386.13514666666674,"weight":0.64},{"loss":380.88966666666664,"weight":0.65},{"loss":375.68794666666673,"weig
ht":0.66},{"loss":370.5299866666667,"weight":0.67},{"loss":365.41578666666663,"weight":0.68},{"loss":360.3453466666667,"weight":0.69},{"loss":355.31866666666673,"weight":0.7},{"loss":350.3357466666667,"weight":0.71},{"loss":345.39658666666674,"weight":0.72},{"loss":340.50118666666674,"weight":0.73},{"loss":335.64954666666677,"weight":0.74},{"loss":330.84166666666664,"weight":0.75},{"loss":326.07754666666665,"weight":0.76},{"loss":321.35718666666673,"weight":0.77},{"loss":316.68058666666667,"weight":0.78},{"loss":312.0477466666667,"weight":0.79},{"loss":307.45866666666666,"weight":0.8},{"loss":302.91334666666665,"weight":0.81},{"loss":298.4117866666668,"weight":0.82},{"loss":293.9539866666667,"weight":0.83},{"loss":289.5399466666667,"weight":0.84},{"loss":285.16966666666667,"weight":0.85},{"loss":280.8431466666667,"weight":0.86},{"loss":276.5603866666667,"weight":0.87},{"loss":272.3213866666667,"weight":0.88},{"loss":268.12614666666656,"weight":0.89},{"loss":263.9746666666667,"weight":0.9},{"loss":259.8669466666667,"weight":0.91},{"loss":255.80298666666658,"weight":0.92},{"loss":251.78278666666662,"weight":0.93},{"loss":247.80634666666677,"weight":0.94},{"loss":243.87366666666668,"weight":0.95},{"loss":239.98474666666667,"weight":0.96},{"loss":236.13958666666667,"weight":0.97},{"loss":232.33818666666667,"weight":0.98},{"loss":228.58054666666666,"weight":0.99},{"loss":224.86666666666667,"weight":1.0},{"loss":221.19654666666662,"weight":1.01},{"loss":217.57018666666661,"weight":1.02},{"loss":213.98758666666663,"weight":1.03},{"loss":210.44874666666664,"weight":1.04},{"loss":206.9536666666667,"weight":1.05},{"loss":203.50234666666665,"weight":1.06},{"loss":200.09478666666666,"weight":1.07},{"loss":196.73098666666664,"weight":1.08},{"loss":193.4109466666666,"weight":1.09},{"loss":190.13466666666665,"weight":1.1},{"loss":186.90214666666668,"weight":1.11},{"loss":183.71338666666662,"weight":1.12},{"loss":180.56838666666673,"weight":1.13},{"loss":177.46714666666668,"weight":1.14},{"loss":174.40966666666665,"weight":1.15},{"loss":171.39594666666667,"weight":1.16},{"loss":168.42598666666674,"weight":1.17},{"loss":165.4997866666667,"weight":1.18},{"loss":162.6173466666667,"weight":1.19},{"loss":159.77866666666668,"weight":1.2},{"loss":156.98374666666666,"weight":1.21},{"loss":154.23258666666666,"weight":1.22},{"loss":151.52518666666666,"weight":1.23},{"loss":148.86154666666667,"weight":1.24},{"loss":146.24166666666667,"weight":1.25},{"loss":143.66554666666667,"weight":1.26},{"loss":141.13318666666666,"weight":1.27},{"loss":138.64458666666667,"weight":1.28},{"loss":136.19974666666667,"weight":1.29},{"loss":133.79866666666666,"weight":1.3},{"loss":131.44134666666665,"weight":1.31},{"loss":129.12778666666665,"weight":1.32},{"loss":126.85798666666668,"weight":1.33},{"loss":124.6319466666667,"weight":1.34},{"loss":122.44966666666663,"weight":1.35},{"loss":120.31114666666664,"weight":1.36},{"loss":118.21638666666666,"weight":1.37},{"loss":116.16538666666669,"weight":1.38},{"loss":114.15814666666667,"weight":1.39},{"loss":112.19466666666668,"weight":1.4},{"loss":110.27494666666665,"weight":1.41},{"loss":108.39898666666669,"weight":1.42},{"loss":106.56678666666669,"weight":1.43},{"loss":104.77834666666668,"weight":1.44},{"loss":103.03366666666668,"weight":1.45},{"loss":101.3327466666667,"weight":1.46},{"loss":99.67558666666665,"weight":1.47},{"loss":98.06218666666668,"weight":1.48},{"loss":96.49254666666664,"weight":1.49},{"loss":94.96666666666667,"weight":1.5},{"loss":93.48454666666666,"weight":1.51},{"loss"
:92.04618666666669,"weight":1.52},{"loss":90.65158666666666,"weight":1.53},{"loss":89.30074666666664,"weight":1.54},{"loss":87.99366666666666,"weight":1.55},{"loss":86.73034666666668,"weight":1.56},{"loss":85.51078666666665,"weight":1.57},{"loss":84.33498666666664,"weight":1.58},{"loss":83.20294666666668,"weight":1.59},{"loss":82.11466666666664,"weight":1.6},{"loss":81.07014666666667,"weight":1.61},{"loss":80.06938666666665,"weight":1.62},{"loss":79.11238666666668,"weight":1.63},{"loss":78.19914666666666,"weight":1.64},{"loss":77.3296666666667,"weight":1.65},{"loss":76.50394666666665,"weight":1.66},{"loss":75.7219866666667,"weight":1.67},{"loss":74.98378666666667,"weight":1.68},{"loss":74.28934666666666,"weight":1.69},{"loss":73.63866666666668,"weight":1.7},{"loss":73.03174666666668,"weight":1.71},{"loss":72.46858666666667,"weight":1.72},{"loss":71.94918666666668,"weight":1.73},{"loss":71.47354666666669,"weight":1.74},{"loss":71.04166666666667,"weight":1.75},{"loss":70.65354666666664,"weight":1.76},{"loss":70.30918666666668,"weight":1.77},{"loss":70.00858666666667,"weight":1.78},{"loss":69.75174666666668,"weight":1.79},{"loss":69.53866666666667,"weight":1.8},{"loss":69.36934666666667,"weight":1.81},{"loss":69.24378666666665,"weight":1.82},{"loss":69.16198666666668,"weight":1.83},{"loss":69.12394666666667,"weight":1.84},{"loss":69.12966666666667,"weight":1.85},{"loss":69.17914666666667,"weight":1.86},{"loss":69.27238666666668,"weight":1.87},{"loss":69.40938666666666,"weight":1.88},{"loss":69.59014666666668,"weight":1.89},{"loss":69.81466666666667,"weight":1.9},{"loss":70.08294666666666,"weight":1.91},{"loss":70.3949866666667,"weight":1.92},{"loss":70.75078666666667,"weight":1.93},{"loss":71.15034666666665,"weight":1.94},{"loss":71.59366666666666,"weight":1.95},{"loss":72.0807466666667,"weight":1.96},{"loss":72.61158666666667,"weight":1.97},{"loss":73.18618666666664,"weight":1.98},{"loss":73.80454666666665,"weight":1.99},{"loss":74.46666666666667,"weight":2.0},{"loss":75.17254666666665,"weight":2.01},{"loss":75.92218666666668,"weight":2.02},{"loss":76.71558666666667,"weight":2.03},{"loss":77.55274666666665,"weight":2.04},{"loss":78.43366666666665,"weight":2.05},{"loss":79.35834666666666,"weight":2.06},{"loss":80.3267866666666,"weight":2.07},{"loss":81.33898666666667,"weight":2.08},{"loss":82.39494666666668,"weight":2.09},{"loss":83.49466666666667,"weight":2.1},{"loss":84.63814666666666,"weight":2.11},{"loss":85.82538666666669,"weight":2.12},{"loss":87.05638666666665,"weight":2.13},{"loss":88.33114666666668,"weight":2.14},{"loss":89.64966666666666,"weight":2.15},{"loss":91.0119466666667,"weight":2.16},{"loss":92.41798666666666,"weight":2.17},{"loss":93.8677866666667,"weight":2.18},{"loss":95.36134666666666,"weight":2.19},{"loss":96.89866666666667,"weight":2.2},{"loss":98.47974666666666,"weight":2.21},{"loss":100.10458666666672,"weight":2.22},{"loss":101.77318666666665,"weight":2.23},{"loss":103.48554666666669,"weight":2.24},{"loss":105.24166666666666,"weight":2.25},{"loss":107.04154666666662,"weight":2.26},{"loss":108.88518666666667,"weight":2.27},{"loss":110.77258666666661,"weight":2.28},{"loss":112.70374666666665,"weight":2.29},{"loss":114.67866666666664,"weight":2.3},{"loss":116.69734666666668,"weight":2.31},{"loss":118.7597866666666,"weight":2.32},{"loss":120.8659866666667,"weight":2.33},{"loss":123.01594666666665,"weight":2.34},{"loss":125.20966666666668,"weight":2.35},{"loss":127.44714666666664,"weight":2.36},{"loss":129.72838666666672,"weight":2.37},{"loss":132.05338666666663,"weight":2
.38},{"loss":134.4221466666667,"weight":2.39},{"loss":136.83466666666664,"weight":2.4},{"loss":139.29094666666668,"weight":2.41},{"loss":141.7909866666667,"weight":2.42},{"loss":144.33478666666667,"weight":2.43},{"loss":146.9223466666667,"weight":2.44},{"loss":149.55366666666671,"weight":2.45},{"loss":152.22874666666667,"weight":2.46},{"loss":154.9475866666667,"weight":2.47},{"loss":157.71018666666666,"weight":2.48},{"loss":160.51654666666673,"weight":2.49},{"loss":163.36666666666667,"weight":2.5},{"loss":166.26054666666656,"weight":2.51},{"loss":169.19818666666657,"weight":2.52},{"loss":172.17958666666658,"weight":2.53},{"loss":175.20474666666672,"weight":2.54},{"loss":178.2736666666666,"weight":2.55},{"loss":181.38634666666667,"weight":2.56},{"loss":184.5427866666666,"weight":2.57},{"loss":187.74298666666667,"weight":2.58},{"loss":190.98694666666668,"weight":2.59},{"loss":194.2746666666668,"weight":2.6},{"loss":197.6061466666666,"weight":2.61},{"loss":200.98138666666674,"weight":2.62},{"loss":204.40038666666655,"weight":2.63},{"loss":207.86314666666664,"weight":2.64},{"loss":211.36966666666657,"weight":2.65},{"loss":214.91994666666662,"weight":2.66},{"loss":218.51398666666663,"weight":2.67},{"loss":222.1517866666667,"weight":2.68},{"loss":225.83334666666667,"weight":2.69},{"loss":229.55866666666677,"weight":2.7},{"loss":233.32774666666663,"weight":2.71},{"loss":237.1405866666667,"weight":2.72},{"loss":240.99718666666664,"weight":2.73},{"loss":244.89754666666678,"weight":2.74},{"loss":248.84166666666667,"weight":2.75},{"loss":252.82954666666657,"weight":2.76},{"loss":256.86118666666664,"weight":2.77},{"loss":260.9365866666666,"weight":2.78},{"loss":265.0557466666667,"weight":2.79},{"loss":269.2186666666666,"weight":2.8},{"loss":273.4253466666667,"weight":2.81},{"loss":277.6757866666666,"weight":2.82},{"loss":281.96998666666667,"weight":2.83},{"loss":286.30794666666674,"weight":2.84},{"loss":290.6896666666667,"weight":2.85},{"loss":295.11514666666665,"weight":2.86},{"loss":299.58438666666666,"weight":2.87},{"loss":304.0973866666666,"weight":2.88},{"loss":308.65414666666675,"weight":2.89},{"loss":313.25466666666665,"weight":2.9},{"loss":317.89894666666675,"weight":2.91},{"loss":322.5869866666668,"weight":2.92},{"loss":327.3187866666667,"weight":2.93},{"loss":332.09434666666675,"weight":2.94},{"loss":336.91366666666676,"weight":2.95},{"loss":341.7767466666666,"weight":2.96},{"loss":346.6835866666667,"weight":2.97},{"loss":351.6341866666667,"weight":2.98},{"loss":356.6285466666668,"weight":2.99},{"loss":361.6666666666667,"weight":3.0},{"loss":366.7485466666666,"weight":3.01},{"loss":371.87418666666673,"weight":3.02},{"loss":377.0435866666666,"weight":3.03},{"loss":382.2567466666667,"weight":3.04},{"loss":387.51366666666655,"weight":3.05},{"loss":392.81434666666667,"weight":3.06},{"loss":398.1587866666665,"weight":3.07},{"loss":403.5469866666666,"weight":3.08},{"loss":408.97894666666656,"weight":3.09},{"loss":414.45466666666687,"weight":3.1},{"loss":419.9741466666666,"weight":3.11},{"loss":425.5373866666668,"weight":3.12},{"loss":431.1443866666665,"weight":3.13},{"loss":436.7951466666667,"weight":3.14},{"loss":442.4896666666665,"weight":3.15},{"loss":448.2279466666667,"weight":3.16},{"loss":454.00998666666663,"weight":3.17},{"loss":459.8357866666669,"weight":3.18},{"loss":465.7053466666666,"weight":3.19},{"loss":471.61866666666685,"weight":3.2},{"loss":477.5757466666666,"weight":3.21},{"loss":483.5765866666666,"weight":3.22},{"loss":489.6211866666667,"weight":3.23},{"loss":495.7095466666668,"we
ight":3.24},{"loss":501.84166666666664,"weight":3.25},{"loss":508.0175466666664,"weight":3.26},{"loss":514.2371866666667,"weight":3.27},{"loss":520.5005866666665,"weight":3.28},{"loss":526.8077466666666,"weight":3.29},{"loss":533.1586666666666,"weight":3.3},{"loss":539.5533466666668,"weight":3.31},{"loss":545.9917866666666,"weight":3.32},{"loss":552.4739866666665,"weight":3.33},{"loss":558.9999466666666,"weight":3.34},{"loss":565.5696666666668,"weight":3.35},{"loss":572.1831466666666,"weight":3.36},{"loss":578.8403866666667,"weight":3.37},{"loss":585.5413866666665,"weight":3.38},{"loss":592.2861466666667,"weight":3.39},{"loss":599.0746666666663,"weight":3.4},{"loss":605.9069466666667,"weight":3.41},{"loss":612.7829866666664,"weight":3.42},{"loss":619.702786666667,"weight":3.43},{"loss":626.6663466666665,"weight":3.44},{"loss":633.6736666666668,"weight":3.45},{"loss":640.7247466666668,"weight":3.46},{"loss":647.8195866666669,"weight":3.47},{"loss":654.9581866666666,"weight":3.48},{"loss":662.1405466666669,"weight":3.49},{"loss":669.3666666666667,"weight":3.5},{"loss":676.6365466666666,"weight":3.51},{"loss":683.9501866666666,"weight":3.52},{"loss":691.3075866666667,"weight":3.53},{"loss":698.7087466666668,"weight":3.54},{"loss":706.1536666666665,"weight":3.55},{"loss":713.6423466666668,"weight":3.56},{"loss":721.1747866666664,"weight":3.57},{"loss":728.7509866666668,"weight":3.58},{"loss":736.3709466666664,"weight":3.59},{"loss":744.0346666666667,"weight":3.6},{"loss":751.7421466666667,"weight":3.61},{"loss":759.4933866666667,"weight":3.62},{"loss":767.2883866666666,"weight":3.63},{"loss":775.1271466666666,"weight":3.64},{"loss":783.0096666666662,"weight":3.65},{"loss":790.9359466666667,"weight":3.66},{"loss":798.9059866666664,"weight":3.67},{"loss":806.9197866666669,"weight":3.68},{"loss":814.9773466666663,"weight":3.69},{"loss":823.0786666666668,"weight":3.7},{"loss":831.2237466666663,"weight":3.71},{"loss":839.412586666667,"weight":3.72},{"loss":847.6451866666666,"weight":3.73},{"loss":855.9215466666667,"weight":3.74},{"loss":864.2416666666667,"weight":3.75},{"loss":872.6055466666663,"weight":3.76},{"loss":881.0131866666668,"weight":3.77},{"loss":889.4645866666667,"weight":3.78},{"loss":897.9597466666667,"weight":3.79},{"loss":906.4986666666666,"weight":3.8},{"loss":915.0813466666667,"weight":3.81},{"loss":923.7077866666666,"weight":3.82},{"loss":932.3779866666666,"weight":3.83},{"loss":941.0919466666666,"weight":3.84},{"loss":949.8496666666667,"weight":3.85},{"loss":958.6511466666668,"weight":3.86},{"loss":967.496386666667,"weight":3.87},{"loss":976.3853866666666,"weight":3.88},{"loss":985.3181466666665,"weight":3.89},{"loss":994.2946666666666,"weight":3.9},{"loss":1003.314946666667,"weight":3.91},{"loss":1012.3789866666667,"weight":3.92},{"loss":1021.4867866666666,"weight":3.93},{"loss":1030.6383466666666,"weight":3.94},{"loss":1039.8336666666667,"weight":3.95},{"loss":1049.0727466666665,"weight":3.96},{"loss":1058.3555866666673,"weight":3.97},{"loss":1067.682186666667,"weight":3.98},{"loss":1077.0525466666668,"weight":3.99},{"loss":1086.4666666666667,"weight":4.0}]},"encoding":{"x":{"field":"weight","type":"quantitative"},"y":{"field":"loss","type":"quantitative"}},"mark":"line"},{"data":{"values":[{"min_loss":69.12394666666667,"weight":1.84}]},"encoding":{"x":{"field":"weight","type":"quantitative"},"y":{"field":"min_loss","title":"loss","type":"quantitative"}},"mark":{"color":"red","size":"100","tooltip":true,"type":"circle"}}],"width":600}

Gradient Descent

defmodule C3.LinearRegressionWithoutBias do
  def predict([item | rest], weight, bias) do
    [predict(item, weight, bias) | predict(rest, weight, bias)]
  end

  def predict([], _weight, _bias), do: []
  def predict(x, weight, bias), do: x * weight + bias

  @doc """
  Returns the mean squared error.
  """
  def loss(x, y, weight, bias) when is_list(x) and is_list(y) do
    predictions = predict(x, weight, bias)
    errors = Enum.zip_with([predictions, y], fn [pr, y] -> pr - y end)
    squared_error = square(errors)
    avg(squared_error)
  end

  @doc """
  Returns the derivative of the loss curve
  """
  def gradient(x, y, weight) do
    predictions = predict(x, weight, 0)
    errors = Enum.zip_with([predictions, y], fn [pr, y] -> pr - y end)
    2 * avg(Enum.zip_with([x, errors], fn [x_item, error] -> x_item * error end))
  end

  # Gradient descent: at each iteration, step the weight against the
  # gradient of the loss, scaled by the learning rate (bias fixed at 0).
  def train(x, y, iterations, lr) when is_list(x) and is_list(y) do
    Enum.reduce(0..(iterations - 1), 0, fn i, weight ->
      IO.puts("Iteration #{i} => Loss: #{loss(x, y, weight, 0)}")
      weight - gradient(x, y, weight) * lr
    end)
  end

  defp square(list) when is_list(list) do
    for i <- list, do: i * i
  end

  defp avg(list) when is_list(list) do
    Enum.sum(list) / length(list)
  end
end
{:module, C3.LinearRegressionWithoutBias, <<70, 79, 82, 49, 0, 0, 18, ...>>, {:avg, 1}}
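
Since the loss is L(w) = mean((x * w - y)²), its derivative is dL/dw = 2 * mean(x * (x * w - y)), which is exactly what gradient/3 computes. A quick check on made-up numbers:

# x = [1, 2], y = [2, 4], weight = 1
# predictions = [1, 2], errors = [-1, -2]
# derivative = 2 * mean([1 * -1, 2 * -2]) = 2 * -2.5
C3.LinearRegressionWithoutBias.gradient([1, 2], [2, 4], 1)
# => -5.0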

Train the system

iterations = Kino.Input.number("iterations", default: 100)
lr = Kino.Input.number("lr (learning rate)", default: 0.001)
iterations = Kino.Input.read(iterations)
lr = Kino.Input.read(lr)

weight = C3.LinearRegressionWithoutBias.train(x, y, iterations, lr)
Iteration 0 => Loss: 812.8666666666667
Iteration 1 => Loss: 304.3630879786667
Iteration 2 => Loss: 143.52657910198326
Iteration 3 => Loss: 92.6549949641037
Iteration 4 => Loss: 76.56463033997743
Iteration 5 => Loss: 71.47534841324155
Iteration 6 => Loss: 69.86564029693419
Iteration 7 => Loss: 69.35649966432483
Iteration 8 => Loss: 69.19546165926808
Iteration 9 => Loss: 69.14452634314573
Iteration 10 => Loss: 69.12841582049259
Iteration 11 => Loss: 69.12332016270709
Iteration 12 => Loss: 69.12170843794642
Iteration 13 => Loss: 69.12119865946183
Iteration 14 => Loss: 69.12103741970814
Iteration 15 => Loss: 69.12098642058018
Iteration 16 => Loss: 69.12097028987425
Iteration 17 => Loss: 69.12096518783261
Iteration 18 => Loss: 69.12096357408869
Iteration 19 => Loss: 69.12096306367155
Iteration 20 => Loss: 69.1209629022298
Iteration 21 => Loss: 69.12096285116675
Iteration 22 => Loss: 69.12096283501587
Iteration 23 => Loss: 69.12096282990741
Iteration 24 => Loss: 69.12096282829165
Iteration 25 => Loss: 69.1209628277806
Iteration 26 => Loss: 69.12096282761895
Iteration 27 => Loss: 69.12096282756781
Iteration 28 => Loss: 69.12096282755168
Iteration 29 => Loss: 69.12096282754655
Iteration 30 => Loss: 69.12096282754493
Iteration 31 => Loss: 69.12096282754443
Iteration 32 => Loss: 69.12096282754425
Iteration 33 => Loss: 69.1209628275442
Iteration 34 => Loss: 69.12096282754416
Iteration 35 => Loss: 69.12096282754418
Iteration 36 => Loss: 69.1209628275442
Iteration 37 => Loss: 69.12096282754418
Iteration 38 => Loss: 69.12096282754419
Iteration 39 => Loss: 69.12096282754418
Iteration 40 => Loss: 69.12096282754416
Iteration 41 => Loss: 69.12096282754419
Iteration 42 => Loss: 69.12096282754418
Iteration 43 => Loss: 69.12096282754418
Iteration 44 => Loss: 69.12096282754419
Iteration 45 => Loss: 69.12096282754418
Iteration 46 => Loss: 69.12096282754418
Iteration 47 => Loss: 69.12096282754416
Iteration 48 => Loss: 69.12096282754419
Iteration 49 => Loss: 69.12096282754418
Iteration 50 => Loss: 69.12096282754419
Iteration 51 => Loss: 69.12096282754419
Iteration 52 => Loss: 69.1209628275442
Iteration 53 => Loss: 69.12096282754416
Iteration 54 => Loss: 69.12096282754416
Iteration 55 => Loss: 69.12096282754419
Iteration 56 => Loss: 69.12096282754419
Iteration 57 => Loss: 69.12096282754418
Iteration 58 => Loss: 69.12096282754418
Iteration 59 => Loss: 69.12096282754418
Iteration 60 => Loss: 69.12096282754419
Iteration 61 => Loss: 69.12096282754419
Iteration 62 => Loss: 69.12096282754419
Iteration 63 => Loss: 69.12096282754416
Iteration 64 => Loss: 69.12096282754416
Iteration 65 => Loss: 69.12096282754416
Iteration 66 => Loss: 69.12096282754416
Iteration 67 => Loss: 69.12096282754416
Iteration 68 => Loss: 69.12096282754416
Iteration 69 => Loss: 69.12096282754416
Iteration 70 => Loss: 69.12096282754416
Iteration 71 => Loss: 69.12096282754416
Iteration 72 => Loss: 69.12096282754416
Iteration 73 => Loss: 69.12096282754416
Iteration 74 => Loss: 69.12096282754416
Iteration 75 => Loss: 69.12096282754416
Iteration 76 => Loss: 69.12096282754416
Iteration 77 => Loss: 69.12096282754416
Iteration 78 => Loss: 69.12096282754416
Iteration 79 => Loss: 69.12096282754416
Iteration 80 => Loss: 69.12096282754416
Iteration 81 => Loss: 69.12096282754416
Iteration 82 => Loss: 69.12096282754416
Iteration 83 => Loss: 69.12096282754416
Iteration 84 => Loss: 69.12096282754416
Iteration 85 => Loss: 69.12096282754416
Iteration 86 => Loss: 69.12096282754416
Iteration 87 => Loss: 69.12096282754416
Iteration 88 => Loss: 69.12096282754416
Iteration 89 => Loss: 69.12096282754416
Iteration 90 => Loss: 69.12096282754416
Iteration 91 => Loss: 69.12096282754416
Iteration 92 => Loss: 69.12096282754416
Iteration 93 => Loss: 69.12096282754416
Iteration 94 => Loss: 69.12096282754416
Iteration 95 => Loss: 69.12096282754416
Iteration 96 => Loss: 69.12096282754416
Iteration 97 => Loss: 69.12096282754416
Iteration 98 => Loss: 69.12096282754416
Iteration 99 => Loss: 69.12096282754416
1.8436928702010968
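
The learned weight (≈1.8437) agrees with the minimum found by scanning the loss curve above (weight = 1.84). With it, a prediction for, say, 20 reservations:

C3.LinearRegressionWithoutBias.predict(20, weight, 0)
# => 36.873857404021936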

Putting Gradient Descent to the Test

defmodule C3.LinearRegressionWithBias do
  def predict([item | rest], weight, bias) do
    [predict(item, weight, bias) | predict(rest, weight, bias)]
  end

  def predict([], _weight, _bias), do: []
  def predict(x, weight, bias), do: x * weight + bias

  @doc """
  Returns the mean squared error.
  """
  def loss(x, y, weight, bias) when is_list(x) and is_list(y) do
    predictions = predict(x, weight, bias)
    errors = Enum.zip_with([predictions, y], fn [pr, y] -> pr - y end)
    squared_error = square(errors)
    avg(squared_error)
  end

  @doc """
  Returns the derivative of the loss curve
  """
  def gradient(x, y, weight, bias) do
    predictions = predict(x, weight, bias)
    errors = Enum.zip_with([predictions, y], fn [pr, y] -> pr - y end)

    w_gradient = 2 * avg(Enum.zip_with([x, errors], fn [x_item, error] -> x_item * error end))
    b_gradient = 2 * avg(errors)

    {w_gradient, b_gradient}
  end

  # Gradient descent on both parameters: step the weight and the bias
  # against their partial derivatives, scaled by the learning rate.
  def train(x, y, iterations, lr) when is_list(x) and is_list(y) do
    Enum.reduce(0..(iterations - 1), %{weight: 0, bias: 0}, fn i, %{weight: weight, bias: bias} ->
      IO.puts("Iteration #{i} => Loss: #{loss(x, y, weight, bias)}")

      {w_gradient, b_gradient} = gradient(x, y, weight, bias)
      %{weight: weight - w_gradient * lr, bias: bias - b_gradient * lr}
    end)
  end

  defp square(list) when is_list(list) do
    for i <- list, do: i * i
  end

  defp avg(list) when is_list(list) do
    Enum.sum(list) / length(list)
  end
end
{:module, C3.LinearRegressionWithBias, <<70, 79, 82, 49, 0, 0, 20, ...>>, {:avg, 1}}
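
With a bias, the loss surface has two partial derivatives: ∂L/∂w = 2 * mean(x * errors) and ∂L/∂b = 2 * mean(errors), which gradient/4 returns as a tuple. A quick check on made-up numbers:

# x = [1, 2], y = [2, 4], weight = 1, bias = 1
# predictions = [2, 3], errors = [0, -1]
# w_gradient = 2 * mean([1 * 0, 2 * -1]) = -2.0
# b_gradient = 2 * mean([0, -1]) = -1.0
C3.LinearRegressionWithBias.gradient([1, 2], [2, 4], 1, 1)
# => {-2.0, -1.0}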

Train the system

iterations = Kino.Input.number("iterations", default: 20_000)
lr = Kino.Input.number("lr (learning rate)", default: 0.001)
iterations = Kino.Input.read(iterations)
lr = Kino.Input.read(lr)

%{weight: weight, bias: bias} =
  C3.LinearRegressionWithBias.train(x, y, iterations, lr)
...
Iteration 19001 => Loss: 22.842736811406905
Iteration 19002 => Loss: 22.842736811325917
Iteration 19003 => Loss: 22.842736811245
Iteration 19004 => Loss: 22.842736811164183
Iteration 19005 => Loss: 22.842736811083455
Iteration 19006 => Loss: 22.842736811002794
Iteration 19007 => Loss: 22.84273681092223
Iteration 19008 => Loss: 22.84273681084175
Iteration 19009 => Loss: 22.84273681076136
Iteration 19010 => Loss: 22.842736810681057
Iteration 19011 => Loss: 22.842736810600822
Iteration 19012 => Loss: 22.842736810520698
Iteration 19013 => Loss: 22.84273681044065
Iteration 19014 => Loss: 22.842736810360687
Iteration 19015 => Loss: 22.842736810280787
Iteration 19016 => Loss: 22.842736810200996
Iteration 19017 => Loss: 22.842736810121288
Iteration 19018 => Loss: 22.84273681004165
Iteration 19019 => Loss: 22.842736809962123
Iteration 19020 => Loss: 22.84273680988266
Iteration 19021 => Loss: 22.84273680980329
Iteration 19022 => Loss: 22.842736809723988
Iteration 19023 => Loss: 22.842736809644794
Iteration 19024 => Loss: 22.842736809565686
Iteration 19025 => Loss: 22.842736809486645
Iteration 19026 => Loss: 22.842736809407686
Iteration 19027 => Loss: 22.842736809328812
Iteration 19028 => Loss: 22.842736809250027
Iteration 19029 => Loss: 22.842736809171335
Iteration 19030 => Loss: 22.84273680909271
Iteration 19031 => Loss: 22.84273680901418
Iteration 19032 => Loss: 22.84273680893573
Iteration 19033 => Loss: 22.842736808857364
Iteration 19034 => Loss: 22.842736808779073
Iteration 19035 => Loss: 22.842736808700884
Iteration 19036 => Loss: 22.84273680862276
Iteration 19037 => Loss: 22.842736808544732
Iteration 19038 => Loss: 22.84273680846678
Iteration 19039 => Loss: 22.8427368083889
Iteration 19040 => Loss: 22.842736808311113
Iteration 19041 => Loss: 22.84273680823342
Iteration 19042 => Loss: 22.84273680815579
Iteration 19043 => Loss: 22.84273680807826
Iteration 19044 => Loss: 22.842736808000815
Iteration 19045 => Loss: 22.842736807923437
Iteration 19046 => Loss: 22.842736807846133
Iteration 19047 => Loss: 22.84273680776893
Iteration 19048 => Loss: 22.842736807691796
Iteration 19049 => Loss: 22.842736807614767
Iteration 19050 => Loss: 22.842736807537804
Iteration 19051 => Loss: 22.842736807460913
Iteration 19052 => Loss: 22.842736807384117
Iteration 19053 => Loss: 22.842736807307393
Iteration 19054 => Loss: 22.842736807230768
Iteration 19055 => Loss: 22.842736807154214
Iteration 19056 => Loss: 22.842736807077753
Iteration 19057 => Loss: 22.84273680700134
Iteration 19058 => Loss: 22.84273680692504
Iteration 19059 => Loss: 22.842736806848816
Iteration 19060 => Loss: 22.842736806772667
Iteration 19061 => Loss: 22.842736806696596
Iteration 19062 => Loss: 22.842736806620604
Iteration 19063 => Loss: 22.8427368065447
Iteration 19064 => Loss: 22.842736806468874
Iteration 19065 => Loss: 22.842736806393134
Iteration 19066 => Loss: 22.84273680631747
Iteration 19067 => Loss: 22.84273680624188
Iteration 19068 => Loss: 22.842736806166375
Iteration 19069 => Loss: 22.84273680609096
Iteration 19070 => Loss: 22.842736806015616
Iteration 19071 => Loss: 22.84273680594034
Iteration 19072 => Loss: 22.842736805865165
Iteration 19073 => Loss: 22.842736805790068
Iteration 19074 => Loss: 22.842736805715035
Iteration 19075 => Loss: 22.842736805640097
Iteration 19076 => Loss: 22.842736805565245
Iteration 19077 => Loss: 22.842736805490457
Iteration 19078 => Loss: 22.842736805415733
Iteration 19079 => Loss: 22.84273680534111
Iteration 19080 => Loss: 22.84273680526659
Iteration 19081 => Loss: 22.842736805192114
Iteration 19082 => Loss: 22.842736805117717
Iteration 19083 => Loss: 22.84273680504342
Iteration 19084 => Loss: 22.84273680496918
Iteration 19085 => Loss: 22.84273680489504
Iteration 19086 => Loss: 22.842736804820955
Iteration 19087 => Loss: 22.84273680474696
Iteration 19088 => Loss: 22.842736804673063
Iteration 19089 => Loss: 22.842736804599216
Iteration 19090 => Loss: 22.842736804525455
Iteration 19091 => Loss: 22.842736804451786
Iteration 19092 => Loss: 22.842736804378173
Iteration 19093 => Loss: 22.84273680430466
Iteration 19094 => Loss: 22.84273680423123
Iteration 19095 => Loss: 22.842736804157855
Iteration 19096 => Loss: 22.842736804084563
Iteration 19097 => Loss: 22.84273680401135
Iteration 19098 => Loss: 22.842736803938223
Iteration 19099 => Loss: 22.84273680386516
Iteration 19100 => Loss: 22.842736803792185
Iteration 19101 => Loss: 22.842736803719287
Iteration 19102 => Loss: 22.84273680364645
Iteration 19103 => Loss: 22.842736803573725
Iteration 19104 => Loss: 22.842736803501055
Iteration 19105 => Loss: 22.842736803428462
Iteration 19106 => Loss: 22.842736803355955
Iteration 19107 => Loss: 22.842736803283522
Iteration 19108 => Loss: 22.84273680321116
Iteration 19109 => Loss: 22.84273680313887
Iteration 19110 => Loss: 22.84273680306666
Iteration 19111 => Loss: 22.842736802994533
Iteration 19112 => Loss: 22.842736802922502
Iteration 19113 => Loss: 22.842736802850517
Iteration 19114 => Loss: 22.842736802778624
Iteration 19115 => Loss: 22.842736802706796
Iteration 19116 => Loss: 22.842736802635052
Iteration 19117 => Loss: 22.842736802563362
Iteration 19118 => Loss: 22.842736802491796
Iteration 19119 => Loss: 22.842736802420266
Iteration 19120 => Loss: 22.84273680234884
Iteration 19121 => Loss: 22.842736802277468
Iteration 19122 => Loss: 22.842736802206176
Iteration 19123 => Loss: 22.842736802134972
Iteration 19124 => Loss: 22.84273680206383
Iteration 19125 => Loss: 22.842736801992768
Iteration 19126 => Loss: 22.84273680192178
Iteration 19127 => Loss: 22.84273680185087
Iteration 19128 => Loss: 22.84273680178003
Iteration 19129 => Loss: 22.84273680170927
Iteration 19130 => Loss: 22.842736801638587
Iteration 19131 => Loss: 22.842736801567977
Iteration 19132 => Loss: 22.84273680149744
Iteration 19133 => Loss: 22.842736801426973
Iteration 19134 => Loss: 22.842736801356597
Iteration 19135 => Loss: 22.842736801286286
Iteration 19136 => Loss: 22.84273680121605
Iteration 19137 => Loss: 22.8427368011459
Iteration 19138 => Loss: 22.842736801075805
Iteration 19139 => Loss: 22.84273680100579
Iteration 19140 => Loss: 22.84273680093585
Iteration 19141 => Loss: 22.84273680086599
Iteration 19142 => Loss: 22.842736800796207
Iteration 19143 => Loss: 22.842736800726485
Iteration 19144 => Loss: 22.842736800656848
Iteration 19145 => Loss: 22.84273680058728
Iteration 19146 => Loss: 22.842736800517795
Iteration 19147 => Loss: 22.84273680044837
Iteration 19148 => Loss: 22.842736800379022
Iteration 19149 => Loss: 22.842736800309748
Iteration 19150 => Loss: 22.84273680024057
Iteration 19151 => Loss: 22.84273680017144
Iteration 19152 => Loss: 22.842736800102386
Iteration 19153 => Loss: 22.842736800033396
Iteration 19154 => Loss: 22.8427367999645
Iteration 19155 => Loss: 22.842736799895658
Iteration 19156 => Loss: 22.842736799826906
Iteration 19157 => Loss: 22.84273679975823
Iteration 19158 => Loss: 22.842736799689614
Iteration 19159 => Loss: 22.842736799621076
Iteration 19160 => Loss: 22.842736799552615
Iteration 19161 => Loss: 22.84273679948421
Iteration 19162 => Loss: 22.84273679941589
Iteration 19163 => Loss: 22.842736799347655
Iteration 19164 => Loss: 22.84273679927947
Iteration 19165 => Loss: 22.842736799211373
Iteration 19166 => Loss: 22.84273679914335
Iteration 19167 => Loss: 22.84273679907539
Iteration 19168 => Loss: 22.84273679900749
Iteration 19169 => Loss: 22.842736798939693
Iteration 19170 => Loss: 22.842736798871947
Iteration 19171 => Loss: 22.842736798804275
Iteration 19172 => Loss: 22.842736798736677
Iteration 19173 => Loss: 22.842736798669147
Iteration 19174 => Loss: 22.8427367986017
Iteration 19175 => Loss: 22.84273679853432
Iteration 19176 => Loss: 22.842736798467
Iteration 19177 => Loss: 22.84273679839977
Iteration 19178 => Loss: 22.842736798332606
Iteration 19179 => Loss: 22.8427367982655
Iteration 19180 => Loss: 22.842736798198473
Iteration 19181 => Loss: 22.842736798131522
Iteration 19182 => Loss: 22.84273679806465
Iteration 19183 => Loss: 22.842736797997837
Iteration 19184 => Loss: 22.8427367979311
Iteration 19185 => Loss: 22.842736797864436
Iteration 19186 => Loss: 22.84273679779783
Iteration 19187 => Loss: 22.842736797731302
Iteration 19188 => Loss: 22.842736797664852
Iteration 19189 => Loss: 22.842736797598455
Iteration 19190 => Loss: 22.84273679753214
Iteration 19191 => Loss: 22.842736797465907
Iteration 19192 => Loss: 22.842736797399734
Iteration 19193 => Loss: 22.842736797333618
Iteration 19194 => Loss: 22.842736797267587
Iteration 19195 => Loss: 22.842736797201624
Iteration 19196 => Loss: 22.842736797135718
Iteration 19197 => Loss: 22.842736797069904
Iteration 19198 => Loss: 22.842736797004147
Iteration 19199 => Loss: 22.84273679693847
Iteration 19200 => Loss: 22.84273679687286
Iteration 19201 => Loss: 22.842736796807305
Iteration 19202 => Loss: 22.84273679674184
Iteration 19203 => Loss: 22.842736796676423
Iteration 19204 => Loss: 22.842736796611096
Iteration 19205 => Loss: 22.842736796545818
Iteration 19206 => Loss: 22.842736796480626
Iteration 19207 => Loss: 22.84273679641551
Iteration 19208 => Loss: 22.842736796350454
Iteration 19209 => Loss: 22.842736796285454
Iteration 19210 => Loss: 22.84273679622055
Iteration 19211 => Loss: 22.84273679615569
Iteration 19212 => Loss: 22.842736796090904
Iteration 19213 => Loss: 22.842736796026195
Iteration 19214 => Loss: 22.84273679596154
Iteration 19215 => Loss: 22.84273679589698
Iteration 19216 => Loss: 22.842736795832465
Iteration 19217 => Loss: 22.842736795768033
Iteration 19218 => Loss: 22.84273679570366
Iteration 19219 => Loss: 22.84273679563935
Iteration 19220 => Loss: 22.842736795575124
Iteration 19221 => Loss: 22.842736795510955
Iteration 19222 => Loss: 22.842736795446854
Iteration 19223 => Loss: 22.842736795382837
Iteration 19224 => Loss: 22.84273679531888
Iteration 19225 => Loss: 22.842736795254986
Iteration 19226 => Loss: 22.84273679519116
Iteration 19227 => Loss: 22.842736795127404
Iteration 19228 => Loss: 22.842736795063715
Iteration 19229 => Loss: 22.842736795000086
Iteration 19230 => Loss: 22.84273679493654
Iteration 19231 => Loss: 22.842736794873062
Iteration 19232 => Loss: 22.842736794809646
Iteration 19233 => Loss: 22.842736794746276
Iteration 19234 => Loss: 22.842736794682985
Iteration 19235 => Loss: 22.84273679461978
Iteration 19236 => Loss: 22.842736794556632
Iteration 19237 => Loss: 22.842736794493554
Iteration 19238 => Loss: 22.84273679443053
Iteration 19239 => Loss: 22.8427367943676
Iteration 19240 => Loss: 22.842736794304702
Iteration 19241 => Loss: 22.84273679424189
Iteration 19242 => Loss: 22.84273679417914
Iteration 19243 => Loss: 22.84273679411645
Iteration 19244 => Loss: 22.842736794053852
Iteration 19245 => Loss: 22.842736793991307
Iteration 19246 => Loss: 22.842736793928818
Iteration 19247 => Loss: 22.842736793866415
Iteration 19248 => Loss: 22.842736793804065
Iteration 19249 => Loss: 22.84273679374177
Iteration 19250 => Loss: 22.842736793679567
Iteration 19251 => Loss: 22.842736793617398
Iteration 19252 => Loss: 22.842736793555318
Iteration 19253 => Loss: 22.84273679349331
Iteration 19254 => Loss: 22.842736793431364
Iteration 19255 => Loss: 22.84273679336947
Iteration 19256 => Loss: 22.842736793307644
Iteration 19257 => Loss: 22.842736793245887
Iteration 19258 => Loss: 22.84273679318421
Iteration 19259 => Loss: 22.842736793122583
Iteration 19260 => Loss: 22.84273679306102
Iteration 19261 => Loss: 22.842736792999524
Iteration 19262 => Loss: 22.842736792938105
Iteration 19263 => Loss: 22.84273679287675
Iteration 19264 => Loss: 22.842736792815447
Iteration 19265 => Loss: 22.842736792754224
Iteration 19266 => Loss: 22.84273679269305
Iteration 19267 => Loss: 22.842736792631943
Iteration 19268 => Loss: 22.842736792570904
Iteration 19269 => Loss: 22.842736792509946
Iteration 19270 => Loss: 22.842736792449028
Iteration 19271 => Loss: 22.8427367923882
Iteration 19272 => Loss: 22.842736792327408
Iteration 19273 => Loss: 22.8427367922667
Iteration 19274 => Loss: 22.842736792206054
Iteration 19275 => Loss: 22.842736792145466
Iteration 19276 => Loss: 22.842736792084953
Iteration 19277 => Loss: 22.842736792024496
Iteration 19278 => Loss: 22.8427367919641
Iteration 19279 => Loss: 22.842736791903786
Iteration 19280 => Loss: 22.842736791843528
Iteration 19281 => Loss: 22.842736791783313
Iteration 19282 => Loss: 22.842736791723187
Iteration 19283 => Loss: 22.842736791663114
Iteration 19284 => Loss: 22.842736791603116
Iteration 19285 => Loss: 22.842736791543167
Iteration 19286 => Loss: 22.84273679148329
Iteration 19287 => Loss: 22.842736791423462
Iteration 19288 => Loss: 22.842736791363716
Iteration 19289 => Loss: 22.84273679130404
Iteration 19290 => Loss: 22.842736791244416
Iteration 19291 => Loss: 22.84273679118485
Iteration 19292 => Loss: 22.842736791125347
Iteration 19293 => Loss: 22.842736791065914
Iteration 19294 => Loss: 22.84273679100654
Iteration 19295 => Loss: 22.842736790947242
Iteration 19296 => Loss: 22.842736790887987
Iteration 19297 => Loss: 22.842736790828795
Iteration 19298 => Loss: 22.84273679076969
Iteration 19299 => Loss: 22.842736790710624
Iteration 19300 => Loss: 22.84273679065164
Iteration 19301 => Loss: 22.8427367905927
Iteration 19302 => Loss: 22.842736790533834
Iteration 19303 => Loss: 22.842736790475033
Iteration 19304 => Loss: 22.842736790416282
Iteration 19305 => Loss: 22.84273679035761
Iteration 19306 => Loss: 22.842736790298993
Iteration 19307 => Loss: 22.842736790240416
Iteration 19308 => Loss: 22.84273679018194
Iteration 19309 => Loss: 22.842736790123503
Iteration 19310 => Loss: 22.842736790065135
Iteration 19311 => Loss: 22.842736790006825
Iteration 19312 => Loss: 22.842736789948578
Iteration 19313 => Loss: 22.842736789890388
Iteration 19314 => Loss: 22.842736789832284
Iteration 19315 => Loss: 22.84273678977422
Iteration 19316 => Loss: 22.842736789716227
Iteration 19317 => Loss: 22.842736789658282
Iteration 19318 => Loss: 22.842736789600412
Iteration 19319 => Loss: 22.842736789542595
Iteration 19320 => Loss: 22.842736789484835
Iteration 19321 => Loss: 22.842736789427153
Iteration 19322 => Loss: 22.842736789369503
Iteration 19323 => Loss: 22.842736789311953
Iteration 19324 => Loss: 22.842736789254445
Iteration 19325 => Loss: 22.842736789196998
Iteration 19326 => Loss: 22.842736789139618
Iteration 19327 => Loss: 22.842736789082288
Iteration 19328 => Loss: 22.84273678902502
Iteration 19329 => Loss: 22.842736788967837
Iteration 19330 => Loss: 22.842736788910678
Iteration 19331 => Loss: 22.8427367888536
Iteration 19332 => Loss: 22.84273678879658
Iteration 19333 => Loss: 22.84273678873963
Iteration 19334 => Loss: 22.842736788682732
Iteration 19335 => Loss: 22.84273678862588
Iteration 19336 => Loss: 22.842736788569105
Iteration 19337 => Loss: 22.842736788512397
Iteration 19338 => Loss: 22.84273678845573
Iteration 19339 => Loss: 22.842736788399137
Iteration 19340 => Loss: 22.842736788342595
Iteration 19341 => Loss: 22.842736788286132
Iteration 19342 => Loss: 22.842736788229693
Iteration 19343 => Loss: 22.842736788173355
Iteration 19344 => Loss: 22.842736788117048
Iteration 19345 => Loss: 22.84273678806081
Iteration 19346 => Loss: 22.842736788004636
Iteration 19347 => Loss: 22.842736787948528
Iteration 19348 => Loss: 22.842736787892463
Iteration 19349 => Loss: 22.84273678783646
Iteration 19350 => Loss: 22.842736787780517
Iteration 19351 => Loss: 22.84273678772465
Iteration 19352 => Loss: 22.84273678766882
Iteration 19353 => Loss: 22.842736787613074
Iteration 19354 => Loss: 22.84273678755737
Iteration 19355 => Loss: 22.842736787501725
Iteration 19356 => Loss: 22.842736787446146
Iteration 19357 => Loss: 22.84273678739062
Iteration 19358 => Loss: 22.842736787335138
Iteration 19359 => Loss: 22.84273678727974
Iteration 19360 => Loss: 22.8427367872244
Iteration 19361 => Loss: 22.84273678716911
Iteration 19362 => Loss: 22.842736787113882
Iteration 19363 => Loss: 22.842736787058712
Iteration 19364 => Loss: 22.8427367870036
Iteration 19365 => Loss: 22.84273678694853
Iteration 19366 => Loss: 22.84273678689355
Iteration 19367 => Loss: 22.842736786838607
Iteration 19368 => Loss: 22.84273678678374
Iteration 19369 => Loss: 22.84273678672893
Iteration 19370 => Loss: 22.842736786674152
Iteration 19371 => Loss: 22.84273678661946
Iteration 19372 => Loss: 22.84273678656481
Iteration 19373 => Loss: 22.842736786510212
Iteration 19374 => Loss: 22.8427367864557
Iteration 19375 => Loss: 22.84273678640123
Iteration 19376 => Loss: 22.842736786346805
Iteration 19377 => Loss: 22.842736786292445
Iteration 19378 => Loss: 22.842736786238152
Iteration 19379 => Loss: 22.842736786183913
Iteration 19380 => Loss: 22.842736786129727
Iteration 19381 => Loss: 22.842736786075612
Iteration 19382 => Loss: 22.842736786021543
Iteration 19383 => Loss: 22.84273678596753
Iteration 19384 => Loss: 22.842736785913573
Iteration 19385 => Loss: 22.84273678585968
Iteration 19386 => Loss: 22.84273678580585
Iteration 19387 => Loss: 22.842736785752074
Iteration 19388 => Loss: 22.84273678569834
Iteration 19389 => Loss: 22.842736785644675
Iteration 19390 => Loss: 22.842736785591065
Iteration 19391 => Loss: 22.842736785537518
Iteration 19392 => Loss: 22.84273678548402
Iteration 19393 => Loss: 22.842736785430585
Iteration 19394 => Loss: 22.842736785377202
Iteration 19395 => Loss: 22.842736785323872
Iteration 19396 => Loss: 22.84273678527061
Iteration 19397 => Loss: 22.842736785217404
Iteration 19398 => Loss: 22.842736785164245
Iteration 19399 => Loss: 22.84273678511114
Iteration 19400 => Loss: 22.842736785058115
Iteration 19401 => Loss: 22.842736785005123
Iteration 19402 => Loss: 22.84273678495219
Iteration 19403 => Loss: 22.842736784899316
Iteration 19404 => Loss: 22.842736784846508
Iteration 19405 => Loss: 22.84273678479374
Iteration 19406 => Loss: 22.842736784741053
Iteration 19407 => Loss: 22.84273678468838
Iteration 19408 => Loss: 22.842736784635797
Iteration 19409 => Loss: 22.842736784583266
Iteration 19410 => Loss: 22.842736784530775
Iteration 19411 => Loss: 22.842736784478348
Iteration 19412 => Loss: 22.842736784425988
Iteration 19413 => Loss: 22.842736784373663
Iteration 19414 => Loss: 22.842736784321414
Iteration 19415 => Loss: 22.842736784269214
Iteration 19416 => Loss: 22.842736784217056
Iteration 19417 => Loss: 22.842736784164977
Iteration 19418 => Loss: 22.842736784112926
Iteration 19419 => Loss: 22.842736784060953
Iteration 19420 => Loss: 22.842736784009027
Iteration 19421 => Loss: 22.84273678395716
Iteration 19422 => Loss: 22.842736783905348
Iteration 19423 => Loss: 22.84273678385359
Iteration 19424 => Loss: 22.84273678380188
Iteration 19425 => Loss: 22.842736783750233
Iteration 19426 => Loss: 22.84273678369863
Iteration 19427 => Loss: 22.842736783647094
Iteration 19428 => Loss: 22.842736783595612
Iteration 19429 => Loss: 22.842736783544183
Iteration 19430 => Loss: 22.842736783492803
Iteration 19431 => Loss: 22.84273678344148
Iteration 19432 => Loss: 22.84273678339022
Iteration 19433 => Loss: 22.84273678333901
Iteration 19434 => Loss: 22.842736783287837
Iteration 19435 => Loss: 22.842736783236735
Iteration 19436 => Loss: 22.842736783185686
...
Iteration 19999 => Loss: 22.84273676157277
%{bias: 13.172267656369339, weight: 1.0811301699901938}

Predict the number of pizzas

n_reservations = Kino.Input.number("number of reservations", default: 20)
n = Kino.Input.read(n_reservations)

C3.LinearRegressionWithBias.predict(n, weight, bias)
34.79487105617322
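
As a quick sanity check: 1.0811301699901938 × 20 + 13.172267656369339 ≈ 34.79487, which matches the prediction above.

The next cell reimplements the same model on top of Nx `defn`, following Kenichi's gist. Its two gradient functions encode the usual partial derivatives of the mean squared error, spelled out here for reference (standard calculus, not specific to the gist):

$$
L(w, b) = \frac{1}{m}\sum_{i=1}^{m}\left(w x_i + b - y_i\right)^2, \qquad
\frac{\partial L}{\partial w} = \frac{2}{m}\sum_{i=1}^{m}\left(w x_i + b - y_i\right) x_i, \qquad
\frac{\partial L}{\partial b} = \frac{2}{m}\sum_{i=1}^{m}\left(w x_i + b - y_i\right)
$$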
# https://gist.github.com/kenichi/d941ab3965d6a49e612f1664eccab337
defmodule C3Test do
  import Nx.Defn

  # ŷ = x * w + b
  defn predict(x, w, b) do
    x
    |> Nx.multiply(w)
    |> Nx.add(b)
  end

  # Mean squared error of the predictions against the labels.
  defn loss(x, y, w, b) do
    x
    |> predict(w, b)
    |> Nx.subtract(y)
    |> Nx.pow(2)
    |> Nx.mean()
  end

  # ∂L/∂w = 2 * mean((ŷ - y) * x)
  defn weight_gradient(x, y, w, b) do
    x
    |> predict(w, b)
    |> Nx.subtract(y)
    |> Nx.multiply(x)
    |> Nx.mean()
    |> Nx.multiply(2)
  end

  # ∂L/∂b = 2 * mean(ŷ - y)
  defn bias_gradient(x, y, w, b) do
    x
    |> predict(w, b)
    |> Nx.subtract(y)
    |> Nx.mean()
    |> Nx.multiply(2)
  end

  def gradients(%Nx.Tensor{} = tx, %Nx.Tensor{} = ty, w, b \\ 0) do
    {
      weight_gradient(tx, ty, w, b),
      bias_gradient(tx, ty, w, b)
    }
  end

  # Plain gradient descent: start from w = b = 0 and take `i` steps of size `lr`.
  def train(%Nx.Tensor{} = tx, %Nx.Tensor{} = ty, i, lr) do
    Enum.reduce(1..i, {0, 0}, fn iteration, {weight, bias} ->
      current_loss = loss(tx, ty, weight, bias)
      IO.puts("Iteration #{iteration} => Loss: #{Nx.to_number(current_loss)}")

      {wg, bg} = gradients(tx, ty, weight, bias)

      {
        adjust(weight, wg, lr),
        adjust(bias, bg, lr)
      }
    end)
  end

  # One descent step: value - gradient * lr
  defn adjust(value, gradient, lr) do
    gradient
    |> Nx.multiply(lr)
    |> then(&Nx.subtract(value, &1))
  end
end
{:module, C3Test, <<70, 79, 82, 49, 0, 0, 27, ...>>, true}
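
A note before running it: `Nx.tensor/1` defaults to 32-bit floats (`{:f, 32}`), while the pure-Elixir implementation above computes in 64-bit floats. That is why the loss below bottoms out at 22.842737197875977 and stops moving: once the gradient step shrinks below f32 resolution, the parameters can no longer change. A minimal sketch for matching the f64 run (assuming the `x`, `y`, `iterations` and `lr` bindings from the earlier cells):

# Hedged sketch: the same training loop on f64 tensors; the loss should keep
# creeping down like the pure-Elixir version instead of plateauing at f32.
tx = Nx.tensor(x, type: :f64)
ty = Nx.tensor(y, type: :f64)
{weight_f64, bias_f64} = C3Test.train(tx, ty, iterations, lr)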
{weight_t, bias_t} = C3Test.train(Nx.tensor(x), Nx.tensor(y), iterations, lr)
...
Iteration 19002 => Loss: 22.842737197875977
...
Iteration 19860 => Loss: 22.842737197875977
Iteration 19861 => Loss: 22.842737197875977
Iteration 19862 => Loss: 22.842737197875977
Iteration 19863 => Loss: 22.842737197875977
Iteration 19864 => Loss: 22.842737197875977
Iteration 19865 => Loss: 22.842737197875977
Iteration 19866 => Loss: 22.842737197875977
Iteration 19867 => Loss: 22.842737197875977
Iteration 19868 => Loss: 22.842737197875977
Iteration 19869 => Loss: 22.842737197875977
Iteration 19870 => Loss: 22.842737197875977
Iteration 19871 => Loss: 22.842737197875977
Iteration 19872 => Loss: 22.842737197875977
Iteration 19873 => Loss: 22.842737197875977
Iteration 19874 => Loss: 22.842737197875977
Iteration 19875 => Loss: 22.842737197875977
Iteration 19876 => Loss: 22.842737197875977
Iteration 19877 => Loss: 22.842737197875977
Iteration 19878 => Loss: 22.842737197875977
Iteration 19879 => Loss: 22.842737197875977
Iteration 19880 => Loss: 22.842737197875977
Iteration 19881 => Loss: 22.842737197875977
Iteration 19882 => Loss: 22.842737197875977
Iteration 19883 => Loss: 22.842737197875977
Iteration 19884 => Loss: 22.842737197875977
Iteration 19885 => Loss: 22.842737197875977
Iteration 19886 => Loss: 22.842737197875977
Iteration 19887 => Loss: 22.842737197875977
Iteration 19888 => Loss: 22.842737197875977
Iteration 19889 => Loss: 22.842737197875977
Iteration 19890 => Loss: 22.842737197875977
Iteration 19891 => Loss: 22.842737197875977
Iteration 19892 => Loss: 22.842737197875977
Iteration 19893 => Loss: 22.842737197875977
Iteration 19894 => Loss: 22.842737197875977
Iteration 19895 => Loss: 22.842737197875977
Iteration 19896 => Loss: 22.842737197875977
Iteration 19897 => Loss: 22.842737197875977
Iteration 19898 => Loss: 22.842737197875977
Iteration 19899 => Loss: 22.842737197875977
Iteration 19900 => Loss: 22.842737197875977
Iteration 19901 => Loss: 22.842737197875977
Iteration 19902 => Loss: 22.842737197875977
Iteration 19903 => Loss: 22.842737197875977
Iteration 19904 => Loss: 22.842737197875977
Iteration 19905 => Loss: 22.842737197875977
Iteration 19906 => Loss: 22.842737197875977
Iteration 19907 => Loss: 22.842737197875977
Iteration 19908 => Loss: 22.842737197875977
Iteration 19909 => Loss: 22.842737197875977
Iteration 19910 => Loss: 22.842737197875977
Iteration 19911 => Loss: 22.842737197875977
Iteration 19912 => Loss: 22.842737197875977
Iteration 19913 => Loss: 22.842737197875977
Iteration 19914 => Loss: 22.842737197875977
Iteration 19915 => Loss: 22.842737197875977
Iteration 19916 => Loss: 22.842737197875977
Iteration 19917 => Loss: 22.842737197875977
Iteration 19918 => Loss: 22.842737197875977
Iteration 19919 => Loss: 22.842737197875977
Iteration 19920 => Loss: 22.842737197875977
Iteration 19921 => Loss: 22.842737197875977
Iteration 19922 => Loss: 22.842737197875977
Iteration 19923 => Loss: 22.842737197875977
Iteration 19924 => Loss: 22.842737197875977
Iteration 19925 => Loss: 22.842737197875977
Iteration 19926 => Loss: 22.842737197875977
Iteration 19927 => Loss: 22.842737197875977
Iteration 19928 => Loss: 22.842737197875977
Iteration 19929 => Loss: 22.842737197875977
Iteration 19930 => Loss: 22.842737197875977
Iteration 19931 => Loss: 22.842737197875977
Iteration 19932 => Loss: 22.842737197875977
Iteration 19933 => Loss: 22.842737197875977
Iteration 19934 => Loss: 22.842737197875977
Iteration 19935 => Loss: 22.842737197875977
Iteration 19936 => Loss: 22.842737197875977
Iteration 19937 => Loss: 22.842737197875977
Iteration 19938 => Loss: 22.842737197875977
Iteration 19939 => Loss: 22.842737197875977
Iteration 19940 => Loss: 22.842737197875977
Iteration 19941 => Loss: 22.842737197875977
Iteration 19942 => Loss: 22.842737197875977
Iteration 19943 => Loss: 22.842737197875977
Iteration 19944 => Loss: 22.842737197875977
Iteration 19945 => Loss: 22.842737197875977
Iteration 19946 => Loss: 22.842737197875977
Iteration 19947 => Loss: 22.842737197875977
Iteration 19948 => Loss: 22.842737197875977
Iteration 19949 => Loss: 22.842737197875977
Iteration 19950 => Loss: 22.842737197875977
Iteration 19951 => Loss: 22.842737197875977
Iteration 19952 => Loss: 22.842737197875977
Iteration 19953 => Loss: 22.842737197875977
Iteration 19954 => Loss: 22.842737197875977
Iteration 19955 => Loss: 22.842737197875977
Iteration 19956 => Loss: 22.842737197875977
Iteration 19957 => Loss: 22.842737197875977
Iteration 19958 => Loss: 22.842737197875977
Iteration 19959 => Loss: 22.842737197875977
Iteration 19960 => Loss: 22.842737197875977
Iteration 19961 => Loss: 22.842737197875977
Iteration 19962 => Loss: 22.842737197875977
Iteration 19963 => Loss: 22.842737197875977
Iteration 19964 => Loss: 22.842737197875977
Iteration 19965 => Loss: 22.842737197875977
Iteration 19966 => Loss: 22.842737197875977
Iteration 19967 => Loss: 22.842737197875977
Iteration 19968 => Loss: 22.842737197875977
Iteration 19969 => Loss: 22.842737197875977
Iteration 19970 => Loss: 22.842737197875977
Iteration 19971 => Loss: 22.842737197875977
Iteration 19972 => Loss: 22.842737197875977
Iteration 19973 => Loss: 22.842737197875977
Iteration 19974 => Loss: 22.842737197875977
Iteration 19975 => Loss: 22.842737197875977
Iteration 19976 => Loss: 22.842737197875977
Iteration 19977 => Loss: 22.842737197875977
Iteration 19978 => Loss: 22.842737197875977
Iteration 19979 => Loss: 22.842737197875977
Iteration 19980 => Loss: 22.842737197875977
Iteration 19981 => Loss: 22.842737197875977
Iteration 19982 => Loss: 22.842737197875977
Iteration 19983 => Loss: 22.842737197875977
Iteration 19984 => Loss: 22.842737197875977
Iteration 19985 => Loss: 22.842737197875977
Iteration 19986 => Loss: 22.842737197875977
Iteration 19987 => Loss: 22.842737197875977
Iteration 19988 => Loss: 22.842737197875977
Iteration 19989 => Loss: 22.842737197875977
Iteration 19990 => Loss: 22.842737197875977
Iteration 19991 => Loss: 22.842737197875977
Iteration 19992 => Loss: 22.842737197875977
Iteration 19993 => Loss: 22.842737197875977
Iteration 19994 => Loss: 22.842737197875977
Iteration 19995 => Loss: 22.842737197875977
Iteration 19996 => Loss: 22.842737197875977
Iteration 19997 => Loss: 22.842737197875977
Iteration 19998 => Loss: 22.842737197875977
Iteration 19999 => Loss: 22.842737197875977
Iteration 20000 => Loss: 22.842737197875977
{#Nx.Tensor<
   f32
   1.081164002418518
 >,
 #Nx.Tensor<
   f32
   13.171684265136719
 >}
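The loss freezes at 22.842737197875977 long before the final iteration: at f32 precision, gradient descent has already converged, so the remaining thousands of updates change nothing the printout can show. The returned tuple holds the learned parameters, weight ≈ 1.0812 and bias ≈ 13.1717. Below is a hypothetical sketch (not part of the original notebook) of how the loop could halt itself once the loss stops improving; `EarlyStopSketch`, `tolerance`, and the `grad`-based `step/5` are illustrative names and assumptions, not the book's code.

```elixir
defmodule EarlyStopSketch do
  import Nx.Defn

  # Mean squared error for the linear model ŷ = x * w + b.
  defn loss(x, y, w, b) do
    err = x * w + b - y
    Nx.mean(err * err)
  end

  # One gradient-descent update on both parameters.
  defn step(x, y, w, b, lr) do
    {grad_w, grad_b} = grad({w, b}, fn {w2, b2} -> loss(x, y, w2, b2) end)
    {w - grad_w * lr, b - grad_b * lr}
  end

  # Stop as soon as the loss improves by less than `tolerance`.
  def train(x, y, max_iterations, lr, tolerance \\ 1.0e-9) do
    init = {Nx.tensor(0.0), Nx.tensor(0.0), nil}

    Enum.reduce_while(1..max_iterations, init, fn _i, {w, b, prev_loss} ->
      {w, b} = step(x, y, w, b, lr)
      current = Nx.to_number(loss(x, y, w, b))

      if prev_loss && prev_loss - current < tolerance do
        {:halt, {w, b, current}}
      else
        {:cont, {w, b, current}}
      end
    end)
  end
end
```

Run with the same data and learning rate, this should land on roughly the same weight and bias as the full 20,000-iteration loop, but bail out as soon as the f32 loss stops moving.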
C3Test.predict(n, weight_t, bias_t)
#Nx.Tensor<
  f32
  34.79496383666992
>
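As a sanity check on the prediction (assuming `n` is 20 reservations, the value that makes the arithmetic line up): ŷ = 1.0812 × 20 + 13.1717 ≈ 34.79, which matches the tensor printed above. A minimal sketch of that computation, assuming `C3Test.predict/3` implements the same ŷ = x * w + b formula as the chapter-2 version, only with Nx operations:

```elixir
# Hypothetical stand-in for C3Test.predict/3, assuming it computes
# ŷ = x * w + b with Nx ops. The literal values are copied from the
# tensors printed above; `n = 20` is an assumption.
n = Nx.tensor(20)
weight_t = Nx.tensor(1.081164002418518)
bias_t = Nx.tensor(13.171684265136719)

n
|> Nx.multiply(weight_t)
|> Nx.add(bias_t)
# => roughly 34.79, matching the output above
```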