@ylegall
Created October 15, 2020 20:39
package org.ygl.openrndr.demos

import org.openrndr.application
import org.openrndr.color.ColorRGBa
import org.openrndr.color.mix
import org.openrndr.draw.BlendMode
import org.openrndr.draw.DrawPrimitive
import org.openrndr.draw.VertexElementType
import org.openrndr.draw.renderTarget
import org.openrndr.draw.shadeStyle
import org.openrndr.draw.vertexBuffer
import org.openrndr.draw.vertexFormat
import org.openrndr.extra.compositor.compose
import org.openrndr.extra.compositor.draw
import org.openrndr.extra.compositor.layer
import org.openrndr.extra.compositor.post
import org.openrndr.extra.fx.blur.FrameBlur
import org.openrndr.extra.fx.blur.GaussianBloom
import org.openrndr.extra.gui.GUI
import org.openrndr.extra.parameters.Description
import org.openrndr.extra.parameters.DoubleParameter
import org.openrndr.extras.camera.OrbitalCamera
import org.openrndr.extras.camera.applyTo
import org.openrndr.ffmpeg.VideoWriter
import org.openrndr.math.Spherical
import org.openrndr.math.Vector2
import org.openrndr.math.Vector3
import org.openrndr.math.mix
import org.openrndr.math.transforms.transform
import org.openrndr.shape.Segment3D
import org.ygl.fastnoise.FastNoise
import org.ygl.fastnoise.FractalType
import org.ygl.openrndr.utils.BokehDepthBlur
import org.ygl.openrndr.utils.ColorMap
import org.ygl.openrndr.utils.cubicPulse
import org.ygl.openrndr.utils.isolatedWithTarget
import kotlin.math.PI
import kotlin.math.cos
import kotlin.math.sin
import kotlin.random.Random

private const val WIDTH = 920
private const val HEIGHT = 920
private const val TOTAL_FRAMES = 360 * 6
private const val LOOPS = 1
private const val DELAY_FRAMES = 60
private const val RECORDING = true

fun main() = application {
    configure {
        width = WIDTH
        height = HEIGHT
    }

    program {
        var time = 0.0
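        // fractal simplex noise that defines the scalar field driving the flow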
        val noise = FastNoise().apply {
            seed = 2
            fractalType = FractalType.FBM
        }
        val numPaths = 500
        val pointsPerPath = 512
        val verticalScale = 0.2
        val rng = Random(1)

        val colorMap1 = ColorMap(listOf(
            "207db0", "38a3a5", "57cc99", "80ed99", "abff8d"
        ))
        val colorMap2 = ColorMap(listOf(
            "f72585", "7209b7", "3a0ca3", "4361ee", "4cc9f0"
        ))

        val planeSize = 200.0
        val planeDivisions = 40
        val planeVertexBuffer = vertexBuffer(vertexFormat {
            position(3)
        }, planeDivisions * planeDivisions * 6)
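        // particle geometry: a small disc built as a triangle fan (center vertex plus 12 rim vertices)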
        val particleSize = 0.9
        val particleGeometry = vertexBuffer(vertexFormat {
            position(3)
        }, 13).apply {
            put {
                write(Vector3.ZERO)
                for (i in 0 until 12) {
                    val angle = 2 * PI * i / 11.0
                    write(Vector3(particleSize * cos(angle), particleSize * sin(angle), 0.0))
                }
            }
        }
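        // per-particle instance data: a model transform and an RGBA color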
        val particleInstances = vertexBuffer(vertexFormat {
            attribute("transform", VertexElementType.MATRIX44_FLOAT32)
            color(4)
        }, numPaths)
        val pathTimeOffsets = List(numPaths) { rng.nextDouble() }
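        // orbital camera orbiting above the plane, looking at the origin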
        val camera = OrbitalCamera(
            eye = Vector3.fromSpherical(Spherical(0.0, 70.0, planeSize)),
            lookAt = Vector3.ZERO,
            fov = 60.0
        )
        keyboard.keyDown.listen {
            if (it.name == "p") {
                println(Vector3.fromSpherical(camera.spherical))
                println(camera.lookAt)
            }
        }
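        // post effects and an offscreen target with a depth buffer for the bokeh blur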
        val bloom = GaussianBloom()
        val blur = BokehDepthBlur()
        val blurTarget = renderTarget(width, height) { colorBuffer(); depthBuffer() }

        val params = @Description("params") object {
            @DoubleParameter("scale", 0.0, 4.0, precision = 2)
            var scale = 0.3

            @DoubleParameter("delta", 0.001, 1.0, precision = 3)
            var delta = scale

            @DoubleParameter("noise magnitude", 0.0, 400.0, precision = 1)
            var mag = 200.0

            @DoubleParameter("step", 0.0, 2.0, precision = 3)
            var step = 0.7
        }
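        // samples the scalar noise field at a 2D position (t is accepted but currently unused)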
        fun noiseValue(pos: Vector2, t: Double = 0.0): Double {
            return params.mag * noise.getSimplexFractal(
                (params.scale * pos.x).toFloat(),
                (params.scale * pos.y).toFloat(),
            ).toDouble()
        }

        fun pos2Dto3D(pos: Vector2): Vector3 {
            val height = verticalScale * noiseValue(pos)
            return Vector3(pos.x, height, pos.y)
        }
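        // 2D curl of the scalar field: the central-difference gradient rotated 90 degrees,
        // which yields a divergence-free flow for the paths to follow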
        fun computeCurl(pos: Vector2, t: Double = 0.0): Vector2 {
            val dx = (
                noiseValue(pos + Vector2(params.delta, 0.0), t) -
                noiseValue(pos - Vector2(params.delta, 0.0), t)
            ) / (2 * params.delta)
            val dy = (
                noiseValue(pos + Vector2(0.0, params.delta), t) -
                noiseValue(pos - Vector2(0.0, params.delta), t)
            ) / (2 * params.delta)
            return Vector2(dy, -dx) * params.step
        }
        // build the plane mesh
        Array(planeDivisions) { row ->
            val y = -planeSize + (2.0 * planeSize * row) / (planeDivisions - 1.0)
            Array(planeDivisions) { col ->
                val x = -planeSize + (2.0 * planeSize * col) / (planeDivisions - 1.0)
                pos2Dto3D(Vector2(x, y)) - Vector3(0.0, 1.0, 0.0)
            }
        }.let { positions ->
            planeVertexBuffer.put {
                for (row in 0 until planeDivisions - 1) {
                    for (col in 0 until planeDivisions - 1) {
                        val p00 = positions[row][col]
                        val p01 = positions[row + 1][col]
                        val p10 = positions[row][col + 1]
                        val p11 = positions[row + 1][col + 1]
                        write(p11)
                        write(p10)
                        write(p00)
                        write(p00)
                        write(p01)
                        write(p11)
                    }
                }
            }
        }
        // compute paths
        val paths = List(numPaths) {
            var pos = Vector2(rng.nextDouble(-planeSize, planeSize), rng.nextDouble(-planeSize, planeSize))
            val points = mutableListOf(pos2Dto3D(pos))
            while (points.size < pointsPerPath && pos.x in -planeSize..planeSize && pos.y in -planeSize..planeSize) {
                val curl = computeCurl(pos)
                pos += curl
                points.add(pos2Dto3D(pos))
            }
            points.asSequence().zipWithNext().map { Segment3D(it.first, it.second) }.toList()
        }
        val pathColorOffsets = List(paths.size) { rng.nextDouble() }
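        // interpolates a position along a path, mapping t in [0, 1) to progress across its segments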
        fun List<Segment3D>.getPathPosition(t: Double): Vector3 {
            val pathTime = size * t
            val index = pathTime.toInt() % size
            val segment = this[index]
            return mix(segment.start, segment.end, pathTime % 1.0)
        }
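        // per-frame update: advance the loop time, move the camera, and rewrite the particle instances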
        fun update() {
            time = ((frameCount - 1) % TOTAL_FRAMES) / TOTAL_FRAMES.toDouble()
            camera.update(deltaTime)
            //val thetaPhase = 0.5 + 0.5 * sin(2 * PI * time)
            val phiPhase = 0.5 + 0.5 * cos(4 * PI * time)
            val eyePosition = Spherical(80 + 360 * time, 85 - 25 * phiPhase, planeSize - 10.0)
            //camera.setView(camera.lookAt, eyePosition, camera.fov)
            val lookAt = Vector3(50 * cos(2 * PI * time), 0.0, 50 * sin(2 * PI * time))
            camera.setView(lookAt, eyePosition, camera.fov)
            particleInstances.put {
                for (p in paths.indices) {
                    val path = paths[p]
                    val timeOffset = (time + pathTimeOffsets[p]) % 1.0
                    val pos = path.getPathPosition(timeOffset)
                    write(transform {
                        translate(pos)
                    })
                    write(ColorRGBa.WHITE.opacify(cubicPulse(0.5, 0.5, timeOffset)))
                }
            }
        }
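        // compositor: render the scene into the blur target, then apply bokeh blur, bloom, and frame blur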
        val composite = compose {
            layer {
                draw {
                    drawer.isolatedWithTarget(blurTarget) {
                        camera.applyTo(drawer)
                        drawer.clear(ColorRGBa.BLACK)
                        drawer.drawStyle.blendMode = BlendMode.ADD

                        // draw the ground plane
                        drawer.fill = ColorRGBa.BLACK
                        drawer.vertexBuffer(planeVertexBuffer, DrawPrimitive.TRIANGLES)

                        val colorPhase = 0.5 + 0.5 * sin(2 * PI * time)
                        val pathColors = List(paths.size) { i ->
                            mix(colorMap1[pathColorOffsets[i]], colorMap2[pathColorOffsets[i]], colorPhase)
                        }

                        // draw the contours
                        for (p in paths.indices) {
                            val path = paths[p]
                            val segmentWeights = MutableList(path.size) { 0.0 }
                            val segmentColors = MutableList(path.size) { ColorRGBa.WHITE }
                            for (i in path.indices) {
                                val pathProgress = i / (path.size - 1.0)
                                val mag = cubicPulse(0.5, 0.5, 8 * (time + pathTimeOffsets[p]) % 1.0)
                                //val mag = cubicPulse(0.5, 0.5, pathProgress)
                                val fade = cubicPulse(0.5, 0.5, pathProgress)
                                segmentWeights[i] = fade * (3.0 + 6.0 * mag)
                                segmentColors[i] = pathColors[p].opacify(fade)
                            }
                            drawer.segments(path, segmentWeights, segmentColors)
                        }

                        // draw the particles
                        drawer.fill = ColorRGBa.WHITE
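                        // billboard shader: the rotation part of the view matrix is replaced with the
                        // identity so each particle disc always faces the camera; the fragment
                        // transform darkens the disc toward its rim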
                        drawer.shadeStyle = shadeStyle {
                            vertexTransform = """
                                x_viewMatrix *= i_transform;
                                x_viewMatrix[0].xyz = vec3(1, 0, 0);
                                x_viewMatrix[1].xyz = vec3(0, 1, 0);
                                x_viewMatrix[2].xyz = vec3(0, 0, 1);
                            """.trimIndent()
                            fragmentTransform = """
                                x_fill = vi_color;
                                x_fill.rgb *= smoothstep(0.0, 0.5, 1.0 - length(va_position.xy));
                            """.trimIndent()
                        }
                        drawer.vertexBufferInstances(
                            listOf(particleGeometry),
                            listOf(particleInstances),
                            DrawPrimitive.TRIANGLE_FAN,
                            particleInstances.vertexCount
                        )
                    }
                    drawer.image(blurTarget.colorBuffer(0))
                }
                post(blur) {
                    depthBuffer = blurTarget.depthBuffer!!
                    focusPoint = 0.25 + 0.05 * cos(4 * PI * time)
                    focusScale = 0.1
                }
                post(bloom) {
                    sigma = 0.1
                    shape = 0.1
                }
                post(FrameBlur()) {
                    blend = 0.4
                }
            }
        }
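        // offscreen target and ffmpeg writer used when recording the animation to disk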
        val videoTarget = renderTarget(width, height) { colorBuffer() }
        val videoWriter = VideoWriter.create()
            .size(width, height)
            .frameRate(60)
            .output("video/curl-noise-contours.mp4")

        if (RECORDING) {
            videoWriter.start()
        } else {
            //extend(OrbitalControls(camera))
            //extend(GUI()) {
            //    add(params)
            //    add(blur)
            //}
        }
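        // main loop: update state, draw the composite, and stream frames to the video writer when recording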
        extend {
            update()
            if (RECORDING) {
                drawer.isolatedWithTarget(videoTarget) {
                    composite.draw(this)
                }
                drawer.image(videoTarget.colorBuffer(0))
                if (frameCount > DELAY_FRAMES) {
                    videoWriter.frame(videoTarget.colorBuffer(0))
                }
                if (frameCount >= TOTAL_FRAMES * LOOPS + DELAY_FRAMES) {
                    videoWriter.stop()
                    application.exit()
                }
            } else {
                composite.draw(drawer)
            }
        }
    }
}