Perception study stimulus rendering using the Python interface to Mitsuba 2
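"""
Perception study stimulus rendering using the Python interface to Mitsuba 2.

Builds a textured room containing a partial divider wall at mid-depth, a
"standard" square floating deeper in the room, a "comparison" square on the
divider, a set of glossy spheres, two spherical area lights and a dim ambient
light, then renders a single 512 x 512 grayscale image to
snyder_scene_demo_render.png.
"""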
import imageio
import numpy as np

import mitsuba

mitsuba.set_variant(value="scalar_rgb")

random = np.random.RandomState(seed=19)

# ROOM GEOMETRY
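# The room is room_width wide (x), room_height tall (y) and room_depth deep
# (z); it opens towards the camera, with the back wall at z = -room_depth.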
room_width = 5
room_halfwidth = room_width / 2
room_height = 5
room_halfheight = room_height / 2
room_depth = 10
room_halfdepth = room_depth / 2

(x, y, z) = ((1, 0, 0), (0, 1, 0), (0, 0, 1))
null_rotation = {"axis": x, "angle": 0}

surface_transforms = {
    "floor": {
        "scale": (room_halfwidth, room_halfdepth),
        "rotation": {"axis": x, "angle": -90},
"translation": (0, -room_halfwidth, -room_halfdepth), | |
    },
    "roof": {
        "scale": (room_halfwidth, room_halfdepth),
        "rotation": {"axis": x, "angle": 90},
"translation": (0, room_halfwidth, -room_halfdepth), | |
    },
    "back": {
        "scale": (room_halfwidth, room_halfheight),
        "rotation": null_rotation,
        "translation": (0, 0, -room_depth),
    },
    "left": {
        "scale": (room_halfdepth, room_halfheight),
        "rotation": {"axis": y, "angle": 90},
        "translation": (-room_halfwidth, 0, -room_halfdepth),
    },
    "right": {
        "scale": (room_halfdepth, room_halfheight),
        "rotation": {"axis": y, "angle": -90},
        "translation": (room_halfwidth, 0, -room_halfdepth),
    },
    "mid_left": {
        "scale": (room_halfwidth * 0.3, room_halfheight),
        "rotation": null_rotation,
        "translation": (room_halfwidth * 0.3 - room_halfwidth, 0, -room_halfdepth),
    },
    "mid_right": {
        "scale": (room_halfwidth * 0.3, room_halfheight),
        "rotation": null_rotation,
        "translation": (room_halfwidth * 0.7, 0, -room_halfdepth),
    },
    "mid_top": {
        "scale": (room_halfwidth * 0.4, room_halfheight * 0.2),
        "rotation": null_rotation,
        "translation": (0, room_halfheight * 0.8, -room_halfdepth),
    },
}

# COMPARISON GEOMETRY
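# The comparison square sits 0.01 units in front of the mid-depth divider
# wall, offset towards the right side of the room.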
surface_transforms["comparison"] = { | |
"scale": (room_halfwidth * 0.3 / 2,) * 2, | |
"rotation": null_rotation, | |
"translation": (room_halfwidth * 0.65, 0, -room_halfdepth + 0.01), | |
} | |
# STANDARD GEOMETRY | |
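# The standard square matches the comparison in size but floats further down
# the room, centred so that it is seen through the opening in the divider.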
standard_depth = 8.5
surface_transforms["standard"] = {
    "scale": surface_transforms["comparison"]["scale"],
    "rotation": null_rotation,
    "translation": (0, 0, -standard_depth),
}

# CREATE SURFACES
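# Mitsuba's "rectangle" shape is a square spanning [-1, 1] in x and y at
# z = 0, so each surface is produced by scaling, rotating and then translating
# that square into place (the transforms compose right to left).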
surfaces = {
    surface_name: {
        "type": "rectangle",
        "to_world": (
            mitsuba.core.Transform4f.translate(v=surface["translation"])
            * mitsuba.core.Transform4f.rotate(
                axis=surface["rotation"]["axis"], angle=surface["rotation"]["angle"]
            )
            * mitsuba.core.Transform4f.scale(v=surface["scale"] + (1,))
        ),
    }
    for (surface_name, surface) in surface_transforms.items()
}

# SPHERES
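# Sphere centres are drawn uniformly at random over the room volume, kept at
# least one sphere diameter away from the walls, floor and ceiling.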
n_spheres = 10
sphere_r = 0.25
sphere_d = sphere_r * 2

sphere_positions = random.uniform(
    low=(
        -room_halfwidth + sphere_d,
        -room_halfheight + sphere_d,
        -room_depth + sphere_d,
    ),
    high=(room_halfwidth - sphere_d, room_halfheight - sphere_d, sphere_d * 2),
    size=(n_spheres, 3),
)

spheres = {
    f"sphere{sphere_num:d}": {
        "type": "sphere",
        "center": sphere_position,
        "radius": sphere_r,
    }
    for (sphere_num, sphere_position) in enumerate(sphere_positions, 1)
}

surfaces.update(spheres)

# MATERIALS - ROOM
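# Each room surface gets its own random grey checkerboard: one reflectance per
# 0.5-unit check, drawn from [0.25, 1.0], upsampled into pixels and saved as a
# PNG that is then used as the surface's diffuse reflectance (raw=True asks
# the bitmap plugin to use the stored values as-is, without sRGB conversion).
# The thin divider ("mid") panels are wrapped in a two-sided BSDF so that both
# faces reflect light.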
check_size = 0.5
check_upsample_factor = 100

for (surface_name, surface) in surfaces.items():

    if surface_name in ["comparison", "standard"] or surface_name.startswith("sphere"):
        continue

    surface_scale = np.array(surface_transforms[surface_name]["scale"])
    surface_size = surface_scale * 2

    n_checks = (surface_size / check_size).astype("int")
    checks = random.uniform(low=0.25, high=1.0, size=n_checks[::-1])

    texture = np.around(checks * 255).astype("uint8")
    texture = np.repeat(
        np.repeat(texture, repeats=check_upsample_factor, axis=0),
        repeats=check_upsample_factor,
        axis=1,
    )

    texture_path = surface_name + "_texture.png"
    imageio.imsave(texture_path, texture)

    surface["material"] = {
        "type": "diffuse",
        "reflectance": {"type": "bitmap", "filename": texture_path, "raw": True},
    }

    if "mid" in surface_name:
        surface["material"] = {
            "type": "twosided",
            "reflectance": surface["material"],
        }

# MATERIALS - COMPARISON AND STANDARD
for (surface_name, surface) in surfaces.items():
    if surface_name not in ["comparison", "standard"]:
        continue
    surface["material"] = {
        "type": "diffuse",
        "reflectance": {"type": "spectrum", "value": 1.0},
    }

# MATERIALS - SPHERES
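# Smooth plastic with the diffuse component set to zero, leaving only the
# specular coating, so the spheres render as dark, shiny balls.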
for (surface_name, surface) in surfaces.items():
    if not surface_name.startswith("sphere"):
        continue
    surface["material"] = {"type": "plastic", "diffuse_reflectance": 0.0}

# LIGHTS
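# Two spherical area emitters, one towards each side wall, tucked just below
# the ceiling a little beyond the mid-depth divider.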
light_r = 0.625
light_strength = 3.0

lights = {
    f"{side:s}_light": {
        "type": "sphere",
        "center": [
            (room_halfwidth - light_r) * side_sign,
            room_halfheight - light_r,
            -room_halfdepth - light_r - 1,
        ],
        "radius": light_r,
        "light": {
            "type": "area",
            "radiance": {"type": "spectrum", "value": light_strength},
        },
    }
    for (side, side_sign) in zip(["left", "right"], [-1, +1])
}

# AMBIENT
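# A dim constant emitter fills in regions that the area lights cannot reach.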
ambient_strength = 0.05
lights["ambient"] = {
    "type": "constant",
    "radiance": ambient_strength,
}

# CAMERA
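# The camera sits just in front of the room opening, looking straight down the
# depth axis towards the back wall; the film records 512 x 512 luminance
# values with 4096 independent samples per pixel.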
camera = {
    "type": "perspective",
    "to_world": mitsuba.core.Transform4f.look_at(
        origin=(0, 0, room_halfheight), target=(0, 0, -room_depth), up=(0, 1, 0),
    ),
    "fov": 45.0,
}

camera["film"] = {
    "type": "hdrfilm",
    "pixel_format": "luminance",
    "width": 512,
    "height": 512,
}

camera["sampler"] = {"type": "independent", "sample_count": 4096}
integrator = {"type": "path", "max_depth": -1}

scene = {"type": "scene", "camera": camera, "integrator": integrator}
scene.update(surfaces)
scene.update(lights)
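# Build the scene object from the nested dictionaries, render it through its
# sensor, then convert the raw luminance film to gamma-corrected 8-bit values
# for saving.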
mitsuba_scene = mitsuba.core.xml.load_dict(scene)

(sensor,) = mitsuba_scene.sensors()
integrator = mitsuba_scene.integrator()
integrator.render(scene=mitsuba_scene, sensor=sensor)

image = (
    sensor.film()
    .bitmap(raw=True)
    .convert(
        pixel_format=mitsuba.core.Bitmap.PixelFormat.Y,
        component_format=mitsuba.core.Struct.Type.UInt8,
        srgb_gamma=True,
    )
)

image.write("snyder_scene_demo_render.png")
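# Optionally, the raw high-dynamic-range film could also be kept alongside the
# tonemapped PNG; a sketch, assuming this Mitsuba build can write OpenEXR:
# sensor.film().bitmap(raw=True).write("snyder_scene_demo_render.exr")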