IPython in a Blender
__pycache__/
.doit.*
.ipynb_checkpoints/
*.egg-info/
build/
dist/
envs/
lib/

IPython in a Blender

This starts Blender (optionally under xvfb) with an IPython kernel running inside it, so JupyterLab can do interactive computing against a live Blender session.

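Once one of the installed kernels is selected in JupyterLab, code cells execute inside Blender's embedded Python, so `bpy` is importable directly. A minimal first cell might look like the sketch below (the object names are what Blender 2.79's default startup scene happens to contain, not something this gist guarantees):

    import bpy

    print(bpy.app.version_string)                             # e.g. 2.79 (sub 0)
    print(sorted(o.name for o in bpy.context.scene.objects))  # e.g. ['Camera', 'Cube', 'Lamp']
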
Based on:

  • https://github.com/akloster/blender-asyncio (the asyncio/Blender event-loop bridge; see the license notes near the end of this gist)

Features:

  • installable kernelspec(s)
  • kernel icons
  • runs on a Linux desktop with Blender
  • synchronized object selection
  • initial updating of a scene tree with ipytree
  • simple float sliders
  • simple, slow rendering as a static PNG (a sketch of both follows this list)
  • runs in binder with xvfb
  • in-browser interactive preview with pythreejs
  • adding objects
  • textures
  • property viewer with ipysheet or ipytree
  • vnc pass-through with jupyter-desktop-server
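
A rough sketch of the float-slider and static-PNG bullets above, as a single cell run in one of the Blender kernels; the object name `Cube`, the output path, and the exact widget wiring are illustrative assumptions rather than code from this gist:

    import bpy
    from ipywidgets import FloatSlider
    from IPython.display import Image, display

    cube = bpy.data.objects["Cube"]        # assumes the default scene's cube
    slider = FloatSlider(min=-3, max=3, step=0.1, description="cube x")

    def _on_change(change):
        # move the cube whenever the slider value changes
        cube.location.x = change["new"]

    slider.observe(_on_change, names="value")
    display(slider)

    # "simple, slow rendering as a static PNG"
    bpy.context.scene.render.filepath = "/tmp/preview.png"
    bpy.ops.render.render(write_still=True)
    display(Image("/tmp/preview.png"))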
blender=2.79.b+dfsg0-1ubuntu1.18.04.1
xvfb
import shutil
import subprocess
import sys
from pathlib import Path
DODO = Path(__file__)
HERE = DODO.parent
PY = sys.executable
OK = 0
# things we check in
LABEXT = HERE / "labex.txt"
SETUP_PY = HERE / "setup.py"
KERNEL_PY = HERE / "ipyblender.py"
ALL_PY = [SETUP_PY, KERNEL_PY, DODO]
# things we don't check in
BUILD = HERE / "build"
BUILD.exists() or BUILD.mkdir()
INSTALLED_LABEXTENSIONS = BUILD / "labextensions.log"
LAB_BUILD_LOG = BUILD / "lab.build.log"
PIP_FREEZE = BUILD / "pip.freeze.log"
PIP_INSTALL = BUILD / "pip.install.log"
KERNEL_INSTALLED = BUILD / "ipyblender.install.log"
PY_FORMATTED = BUILD / "formatted.log"
# things other people make
LAB_BUILD = Path(sys.prefix) / "share" / "jupyter" / "lab"
YARN_LOCK = LAB_BUILD / "staging" / "yarn.lock"
# tasks
def task_py_format():
    return {
        "file_dep": [KERNEL_PY, DODO, SETUP_PY],
        "targets": [PY_FORMATTED],
        "actions": [py_format],
        "clean": _cleanit(PY_FORMATTED),
    }


def task_lab_extend():
    return {
        "file_dep": [LABEXT],
        "targets": [INSTALLED_LABEXTENSIONS],
        "actions": [lab_extend],
        "clean": _cleanit(INSTALLED_LABEXTENSIONS, LAB_BUILD / "extensions"),
    }


def task_lab_build():
    return {
        "file_dep": [INSTALLED_LABEXTENSIONS],
        "targets": [YARN_LOCK, LAB_BUILD_LOG],
        "actions": [lab_build],
        "clean": _cleanit(LAB_BUILD_LOG, LAB_BUILD),
    }


def task_pip_freeze():
    return {
        "file_dep": [SETUP_PY],
        "targets": [PIP_FREEZE],
        "actions": [pip_freeze],
        "clean": _cleanit(PIP_FREEZE),
    }


def task_setup_py():
    return {
        "file_dep": [PIP_FREEZE, SETUP_PY, KERNEL_PY, PY_FORMATTED],
        "actions": [pip_install],
        "clean": _cleanit(PIP_FREEZE, PIP_INSTALL),
    }


def task_kernel_install():
    return {
        "file_dep": [PIP_FREEZE, SETUP_PY, KERNEL_PY],
        "targets": [KERNEL_INSTALLED],
        "actions": [kernel_install],
        "clean": _cleanit(KERNEL_INSTALLED),
    }
# implementations
def lab_extend():
    args = [PY, "-m", "jupyter", "labextension", "install", "--no-build", "--debug"]
    exts = LABEXT.read_text().splitlines()
    return _logit([*args, *exts], INSTALLED_LABEXTENSIONS, "to install labextensions")


def lab_build():
    args = [
        PY,
        "-m",
        "jupyter",
        "lab",
        "build",
        "--dev-build=False",
        "--minimize=True",
        "--debug",
    ]
    return _logit(args, LAB_BUILD_LOG, "to build lab")


def pip_freeze():
    args = [PY, "-m", "pip", "freeze"]
    return _logit(args, PIP_FREEZE, "to freeze pip")


def pip_install():
    args = [PY, "-m", "pip", "install", "-e", ".", "--no-deps", "--ignore-installed"]
    return _logit(args, PIP_INSTALL, "to do a dev setup")


def kernel_install():
    args = ["ipyblender-install"]
    return _logit(args, KERNEL_INSTALLED, "to install kernel")


def py_format():
    _logit(["isort", "-rc", *ALL_PY], PY_FORMATTED, "to sort imports")
    _logit(["black", *ALL_PY], PY_FORMATTED, "to format python")
# utilities
def _cleanit(*paths):
    def clean():
        for path in paths:
            if not path.exists():
                continue
            if path.is_dir():
                shutil.rmtree(path)
            else:
                path.unlink()
        return True

    return [clean]


def _logit(args, outfile, msg):
    """ TODO: streaming
    """
    outfile.exists() and outfile.unlink()
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout, stderr = proc.communicate()
    log = f"""{stdout.decode("utf-8")}{stderr.decode("utf-8")}"""
    assert proc.returncode == OK, f"FAILED {msg}\n\n{log}\n"
    outfile.write_text(log)
    return True
name: blender-binder
channels:
- conda-forge
dependencies:
# this is to match the version of blender that comes from `apt` on binder
# namely,
# - blender=2.79.b+dfsg0-1ubuntu1.18.04.1
# - libpython=3.6.9-1~18.04
- python ==3.6.9
# build deps
- black
- doit
- isort
- nodejs
- pip
- setuptools
# core
- ipython
- jupyterlab ==1.2.6
# demo bling
- importnb
- ipyleaflet
- ipysheet
- ipytree
- ipywidgets
- lxml
- pandas
- pythreejs
- shapely
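
The pin above matters because ipyblender.py (below) injects this environment's site-packages into Blender's bundled interpreter, so the two Pythons need to agree closely. A quick sanity check to run in the Blender kernel (the expected value comes from the comment above; nothing in this gist enforces it at runtime):

    import sys

    print(sys.version_info[:3])  # expected to be (3, 6, 9), matching the conda pin
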
import json
import os
import shutil
import site
import subprocess
import sys
from pathlib import Path
__version__ = "0.1.0"
OK = 0
class WillNotBlend(Exception):
    pass
# these run in the host environment
def blender_bin():
    blender_path = shutil.which("blender")
    if blender_path is None:
        raise WillNotBlend("couldn't even find blender")
    return Path(blender_path)


def blender_version(force_bin=None):
    return (
        subprocess.check_output([force_bin or blender_bin(), "--version"])
        .decode("utf-8")
        .splitlines()[0]
    )


def install_kernelspecs():
    install_kernelspec()
    if shutil.which("xvfb-run"):
        install_kernelspec(xvfb=True)
def install_kernelspec(xvfb=False):
    import pkg_resources

    kernel_path = Path(sys.prefix) / "share" / "jupyter" / "kernels" / "blender"
    blender_from_env = Path(os.environ.get("IPYBLENDER_BLENDER_BIN", blender_bin()))
    assert blender_from_env.exists(), f"blender not found at {blender_from_env}"
    display_name = blender_version(blender_from_env)
    argv = [sys.executable, "-m", "ipyblender", "-f", "{connection_file}"]

    if xvfb:
        kernel_path = (
            Path(sys.prefix) / "share" / "jupyter" / "kernels" / "blender-xvfb"
        )
        display_name += " (xvfb)"
        argv = ["xvfb-run", "-a", *argv]

    kernel_path.exists() or kernel_path.mkdir(parents=True)
    spec_path = kernel_path / "kernel.json"
    spec = {
        "argv": list(map(str, argv)),
        "display_name": display_name,
        "language": "python",
        "env": {
            "IPYBLENDER_PYTHON_SITE_PACKAGES": site.getsitepackages()[0],
            "IPYBLENDER_BLENDER": str(blender_from_env),
        },
    }

    # pass through dynamic linker paths, prepending conda's lib if present
    path_vars = ["LD_LIBRARY_PATH", "LD_LINK_PATH", "LD_PATH"]
    for path_var in path_vars:
        if path_var in os.environ:
            spec["env"][path_var] = os.environ[path_var]
            if path_var == "LD_LIBRARY_PATH" and "CONDA_PREFIX" in os.environ:
                spec["env"][path_var] = os.path.pathsep.join(
                    [
                        os.path.join(os.environ["CONDA_PREFIX"], "lib"),
                        spec["env"][path_var],
                    ]
                )

    spec_path.write_text(json.dumps(spec, indent=2))

    for res in ["32", "64"]:
        fname = f"logo-{res}x{res}.png"
        src = Path(pkg_resources.resource_filename("ipyblender", fname))
        if src.exists():
            dest = kernel_path / fname
            shutil.copy2(src, dest)

    return OK
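
# For reference, the xvfb variant written above ends up as a kernel.json roughly
# like the following; the paths are illustrative (a conda env at /opt/conda and
# the apt blender at /usr/bin/blender), not values this code hard-codes, and any
# LD_* variables picked up above would appear in "env" as well:
#
#   {
#     "argv": ["xvfb-run", "-a", "/opt/conda/bin/python", "-m", "ipyblender",
#              "-f", "{connection_file}"],
#     "display_name": "Blender 2.79 (sub 0) (xvfb)",
#     "language": "python",
#     "env": {
#       "IPYBLENDER_PYTHON_SITE_PACKAGES": "/opt/conda/lib/python3.6/site-packages",
#       "IPYBLENDER_BLENDER": "/usr/bin/blender"
#     }
#   }
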
# these run in the blender environment
def initialize_blender_loop():
    import bpy
    import asyncio
    import heapq
    import socket
    import subprocess
    import time
    import os
    import sys

    def _run_once(self):
        """Run one full iteration of the event loop.

        This calls all currently ready callbacks, polls for I/O,
        schedules the resulting callbacks, and finally schedules
        'call_later' callbacks.

        This is copied verbatim from the standard library code, with
        only one little change, namely the default timeout value.
        """
        # Remove delayed calls that were cancelled from head of queue.
        while self._scheduled and self._scheduled[0]._cancelled:
            heapq.heappop(self._scheduled)

        # Set default timeout for call to "select" API. In the original
        # standard library code this timeout is None, meaning select will
        # block until anything happens. Can't have that with foreign event loops!
        timeout = 1.0 / 100.0
        if self._ready:
            timeout = 0
        elif self._scheduled:
            # Compute the desired timeout.
            when = self._scheduled[0]._when
            deadline = max(0, when - self.time())
            if timeout is None:
                timeout = deadline
            else:
                timeout = min(timeout, deadline)

        event_list = self._selector.select(timeout)
        self._process_events(event_list)

        # Handle 'later' callbacks that are ready.
        end_time = self.time() + self._clock_resolution
        while self._scheduled:
            handle = self._scheduled[0]
            if handle._when >= end_time:
                break
            handle = heapq.heappop(self._scheduled)
            self._ready.append(handle)

        # This is the only place where callbacks are actually *called*.
        # All other places just add them to ready.
        # Note: We run all currently scheduled callbacks, but not any
        # callbacks scheduled by callbacks run this time around --
        # they will be run the next time (after another I/O poll).
        # Use an idiom that is threadsafe without using locks.
        ntodo = len(self._ready)
        for i in range(ntodo):
            handle = self._ready.popleft()
            if not handle._cancelled:
                handle._run()
        handle = None  # Needed to break cycles when an exception occurs.
    class AsyncioBridgeOperator(bpy.types.Operator):
        """Operator which runs itself from a timer"""

        bl_idname = "bpy.start_asyncio_bridge"
        bl_label = "Start Asyncio Modal Operator"

        def __init__(self):
            super().__init__()

        def __del__(self):
            pass

        def modal(self, context, event):
            if event.type == "TIMER":
                _run_once(self.loop)
            else:
                for listener_id, listener in self.listeners.items():
                    fire, catch = listener.check_event(event)
                    if fire:
                        listener.flag.set()
                        # In the case of firing an event, it is important to
                        # quit the listener processing in this loop iteration.
                        # This assures that only one asyncio.Event flag is
                        # set per iteration.
                        if catch:
                            return {"RUNNING_MODAL"}
                        else:
                            return {"PASS_THROUGH"}
            return {"PASS_THROUGH"}

        def execute(self, context):
            self.types = {}
            self.listeners = {}
            self.listener_id = 0
            self.loop = asyncio.get_event_loop()
            self.loop.operator = self
            wm = context.window_manager
            wm.modal_handler_add(self)
            self._timer = wm.event_timer_add(0.005, window=context.window)
            return {"RUNNING_MODAL"}

        def invoke(self, context, event):
            self.execute(context)
            return {"RUNNING_MODAL"}

        def cancel(self, context):
            wm = context.window_manager
            wm.event_timer_remove(self._timer)

        def add_listener(self, listener):
            self.listeners[self.listener_id] = listener
            listener.id = self.listener_id
            self.listener_id += 1

        def remove_listener(self, listener):
            del self.listeners[listener.id]
    def register():
        try:
            bpy.utils.register_class(AsyncioBridgeOperator)
        except:
            pass

    def get_event_loop():
        register()
        loop = asyncio.get_event_loop()
        if not hasattr(loop, "operator") or loop.operator is None:
            bpy.ops.bpy.start_asyncio_bridge("INVOKE_DEFAULT")
        return loop

    def unregister():
        bpy.utils.unregister_class(AsyncioBridgeOperator)

    return get_event_loop()
async def start_kernel_app():
    from tornado import ioloop
    from ipykernel.kernelapp import IPKernelApp

    # keep argv from the element before "-f" onward, i.e. `<argv0> -f <connection_file>`
    sys.argv = sys.argv[sys.argv.index("-f") - 1 :]

    app = IPKernelApp.instance()
    app.initialize()

    if app.poller is not None:
        app.poller.start()

    app.kernel.start()
    app.io_loop = ioloop.IOLoop.current()


def launch_kernel():
    IPYBLENDER_PYTHON_SITE_PACKAGES = os.environ.get("IPYBLENDER_PYTHON_SITE_PACKAGES")
    if IPYBLENDER_PYTHON_SITE_PACKAGES is not None:
        sys.path = [IPYBLENDER_PYTHON_SITE_PACKAGES] + list(sys.path)

    loop = initialize_blender_loop()
    loop.create_task(start_kernel_app())


if __name__ == "__main__":
    if int(os.environ.get("IPYBLENDER_IN_BLENDER", "0")) > 0:
        launch_kernel()
    elif "IPYBLENDER_BLENDER" in os.environ:
        # not yet inside Blender: re-launch this same script under Blender
        env = dict(**os.environ)
        env["IPYBLENDER_IN_BLENDER"] = "1"
        args = [os.environ["IPYBLENDER_BLENDER"], "-P", __file__, "--", *sys.argv]
        sys.exit(subprocess.call(args, env=env))
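
The bridge above never calls `loop.run_forever()`, which would block Blender's UI; instead, each TIMER event in `AsyncioBridgeOperator.modal` gives the asyncio loop exactly one non-blocking iteration via `_run_once`. The standalone sketch below (plain CPython, no Blender, and not part of ipyblender.py) shows the same pumping pattern using only public asyncio API, with a while loop standing in for the host application's timer:

    import asyncio
    import time

    async def ticker():
        # stands in for the kernel's coroutines
        for i in range(3):
            print("tick", i)
            await asyncio.sleep(0.01)

    loop = asyncio.get_event_loop()
    task = loop.create_task(ticker())

    while not task.done():
        loop.call_soon(loop.stop)  # make run_forever return after one iteration
        loop.run_forever()         # run whatever callbacks are currently ready
        time.sleep(0.005)          # the host's timer period, like the 0.005s timer above
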
@jupyter-widgets/jupyterlab-manager
ipysheet
ipytree
jupyter-leaflet
jupyter-threejs
Copyright (c) 2020, dead pixels collective
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Significant portions of this code are derived from:
https://github.com/akloster/blender-asyncio
Copyright 2015 Andreas Klostermann
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
include README.md *.png LICENSE
#!/usr/bin/env bash
echo "CONDA_PREFIX is $CONDA_PREFIX"

# NB_UID is only set on binder; locally there is nothing to fix up
if [ "${NB_UID}not-on-binder" = "not-on-binder" ]; then
    echo "...and that's fine"
else
    # on binder: if CONDA_PREFIX isn't set yet, activate the base environment
    if [ "${CONDA_PREFIX}no-conda-prefix" = "no-conda-prefix" ]; then
        echo "...and we set it to ${CONDA_DIR}"
        source ${CONDA_DIR}/bin/activate $CONDA_DIR
    else
        echo "...and that's fine"
    fi
fi

set -eux
doit clean
doit || cat build/*
import setuptools

SETUP_ARGS = dict(
    name="ipyblender",
    version="0.1.0",
    description="A blender kernel",
    url="http://github.com/deathbeds/ipyblender",
    author="Dead Pixels Collective",
    author_email="ripx@groups.google.com",
    license="BSD-3-Clause",  # matches the BSD 3-Clause text in LICENSE above
    py_modules=["ipyblender"],
    include_package_data=True,
    zip_safe=False,
    entry_points={
        "console_scripts": [
            "ipyblender-install = ipyblender:install_kernelspecs",
            "ipyblender-kernel = ipyblender:launch_kernel",
        ],
    },
)

if __name__ == "__main__":
    setuptools.setup(**SETUP_ARGS)
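
The console scripts above are thin wrappers around the two functions in ipyblender.py: `ipyblender-install` maps to `install_kernelspecs()` (which dodo.py's kernel_install task invokes) and `ipyblender-kernel` maps to `launch_kernel()`. Calling the installer from Python directly would look like this (a hypothetical usage sketch, not part of the gist):

    from ipyblender import install_kernelspecs

    # writes the `blender` kernelspec, plus `blender-xvfb` when xvfb-run is on PATH
    install_kernelspecs()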