@kassankar
Last active April 11, 2022 13:46
OFDMModulator() bug
import tensorflow as tf
import numpy as np
from tensorflow.keras import Model
# Import Sionna
try:
    import sionna
except ImportError as e:
    import os
    os.system("pip install sionna")
    import sionna
print("sionna_ver: ", sionna.__version__)
from sionna.utils import BinarySource, ebnodb2no, log10, expand_to_rank, insert_dims
from sionna.mapping import Mapper, Demapper, Constellation
from sionna.mimo import StreamManagement
#ofdm
from sionna.ofdm import ResourceGrid, ResourceGridMapper,OFDMModulator,OFDMDemodulator
###############################################
## Channel configuration
###############################################
carrier_frequency = 3.5e9 # Hz
ebno_db_min = -3
ebno_db_max = 5
############################################
## OFDM waveform configuration
subcarrier_spacing = 30e3 # Hz
fft_size = 72 # Number of subcarriers forming the resource grid (no guard bands or DC null are configured in this example)
num_ofdm_symbols = 14 # Number of OFDM symbols forming the resource grid
cyclic_prefix_length = 0 # No cyclic prefix is used in this example
############################################
## Training configuration
training_batch_size = 128 # Training batch size
resource_grid = ResourceGrid(num_ofdm_symbols=num_ofdm_symbols,
                             fft_size=fft_size,
                             subcarrier_spacing=subcarrier_spacing,
                             num_tx=1,
                             num_streams_per_tx=1,
                             cyclic_prefix_length=cyclic_prefix_length)
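# Optional sanity print (assumes Sionna's ResourceGrid defaults: no pilots,
# no guard carriers, no DC null). num_data_symbols is what sizes the codeword
# length n below and should then equal num_ofdm_symbols * fft_size = 14 * 72.
print("num_data_symbols: ", resource_grid.num_data_symbols)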
###############################################
# Modulation and coding configuration
###############################################
num_bits_per_symbol = 4
modulation_order = 2**num_bits_per_symbol
coderate = 0.5
n = int(resource_grid.num_data_symbols*num_bits_per_symbol)
num_symbols_per_codeword = n//num_bits_per_symbol
k = int(n*coderate)
# Test
## Transmitter
tf.random.set_seed(1)
binary_source = BinarySource()
mapper = Mapper("qam", num_bits_per_symbol)
demapper = Demapper("app","qam", num_bits_per_symbol)
rg_mapper = ResourceGridMapper(resource_grid)
ofdm_mod = OFDMModulator(cyclic_prefix_length)
ofdm_demod = OFDMDemodulator(fft_size, 0, cyclic_prefix_length) # l_min = 0 (no negative channel delay taps)
batch_size = 64
ebno_db = tf.fill([batch_size], 5.0)
no = ebnodb2no(ebno_db, num_bits_per_symbol, coderate)
## Transmitter
# Generate codewords
c = binary_source([batch_size, 1, 1, n])
print("c shape: ", c.shape)
# Map bits to QAM symbols
x = mapper(c)
print("x shape: ", x.shape)
# Map the QAM symbols to a resource grid
x_rg = rg_mapper(x)
print("x_rg shape: ", x_rg.shape)
# Time-Domain representation of an OFDM resource grid
x_time = ofdm_mod(x_rg)
print("x_time shape: ", x_time.shape)
# Frequency-Domain representation of an OFDM
x_f = ofdm_demod(x_time)
print("x_f shape: ", x_f.shape)
class E2ESystem(Model):

    def __init__(self, system, training=False):
        super().__init__()

        ######################################
        ## Transmitter
        self._binary_source = BinarySource()
        # Trainable constellation
        constellation = Constellation("qam", num_bits_per_symbol, trainable=True)
        self.constellation = constellation
        self._mapper = Mapper(constellation=constellation)
        self._rg_mapper = ResourceGridMapper(resource_grid)
        # Time-domain OFDM modulator
        self._ofdm_mod = OFDMModulator(cyclic_prefix_length)
        # Frequency-domain OFDM demodulator
        self._ofdm_demod = OFDMDemodulator(fft_size, 0, cyclic_prefix_length)
    @tf.function
    def call(self, batch_size, ebno_db):

        # If `ebno_db` is a scalar, broadcast it to a tensor of shape
        # [batch_size], as expected by some layers
        if len(ebno_db.shape) == 0:
            ebno_db = tf.fill([batch_size], ebno_db)

        ######################################
        ## Transmitter
        no = ebnodb2no(ebno_db, num_bits_per_symbol, coderate)
        c = self._binary_source([batch_size, 1, 1, n])
        x = self._mapper(c)
        x_rg = self._rg_mapper(x)
        print(x_rg.shape)
        # Time-domain representation of the OFDM resource grid
        x_time = self._ofdm_mod(x_rg)
        print(x_time.shape)
        ######################################
        # Frequency-domain representation recovered by the demodulator
        x_f = self._ofdm_demod(x_time)
        print(x_f.shape)
        # Return the demodulated grid so the forward pass below receives a tensor
        return x_f
model = E2ESystem('neural-receiver')
ebno_db = tf.random.uniform(shape=[], minval=ebno_db_min, maxval=ebno_db_max)
# Forward pass
with tf.GradientTape() as tape:
    rate = model(training_batch_size, ebno_db)
    loss = -rate
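# Optional debugging aid: call() is wrapped in @tf.function, so the print()
# statements inside it only fire when the function is traced. Running the
# functions eagerly makes them show concrete shapes on every invocation.
tf.config.run_functions_eagerly(True)
model_eager = E2ESystem('neural-receiver')
model_eager(training_batch_size, tf.constant(5.0))
tf.config.run_functions_eagerly(False)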