Skip to content

Instantly share code, notes, and snippets.

@eldipa
Last active March 6, 2022 12:18
Show Gist options
  • Save eldipa/d9b02875a13537e72fbce4cdb8e3f282 to your computer and use it in GitHub Desktop.
Save eldipa/d9b02875a13537e72fbce4cdb8e3f282 to your computer and use it in GitHub Desktop.
Minimal example of how to load plugins in Python. `load_plugin_bad.py` does not support spawning a subprocess from within a plugin, while `load_plugin_workaround.py` does.
# Save this file into plugins/foo.py
class Foo:
    """Example plugin: a class that the loader discovers and instantiates."""

    @classmethod
    def sayhi(cls):
        """Print a short greeting to stdout."""
        print("Hi!")
import sys, pkgutil, inspect, pickle
import importlib.util
import multiprocessing
def init():
    """Discover plugin modules under plugins/ and instantiate every class found.

    Returns:
        list: one instance of each class visible in each plugin module.
    """
    dirnames = ["plugins/"]
    objs = []
    # For each plugin folder, see which Python files are there
    # and load them.
    for importer, name, is_pkg in pkgutil.iter_modules(dirnames):
        # Find and load the Python module.
        spec = importer.find_spec(name)
        module = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(module)
        # XXX Fix 1: register the module under its name so that pickled
        # references to its attributes can later be resolved by import.
        sys.modules[name] = module
        # Only load the classes from the module. Pass inspect.isclass
        # directly instead of wrapping it in a redundant lambda.
        classes = inspect.getmembers(module, inspect.isclass)
        # Create one object per class.
        for _, class_ in classes:
            print(class_.__module__)
            objs.append(class_())
    return objs
def try_spawn(obj):
    """Run obj.sayhi in a freshly started child process and wait for it.

    Here is where we have the problem: obj.sayhi can be pickled by the
    parent process (us), but it cannot be unpickled by the child. With
    the "spawn" and "forkserver" start methods the child process is an
    independent Python interpreter that has no idea of how to load the
    plugins, therefore it will not know how to load the given obj.sayhi
    code.
    """
    child = multiprocessing.Process(target=obj.sayhi)
    child.start()
    child.join()
if __name__ == '__main__':
    # Force "spawn" so the failure is reproducible on every platform
    # (children start as fresh interpreters, nothing is inherited).
    multiprocessing.set_start_method('spawn')
    plugin_objects = init()
    try_spawn(plugin_objects[0])
import sys, pkgutil, inspect, pickle
import importlib.util
import multiprocessing
import multiprocessing.reduction as red
def loader():
    """Import every Python module found under plugins/ and return them.

    Each module is also registered in sys.modules (XXX Fix 1) so that
    pickled references to its attributes can later be resolved by name.
    """
    plugin_dirs = ["plugins/"]
    modules = []
    # Walk each plugin folder and import every Python file found there.
    for finder, mod_name, _is_pkg in pkgutil.iter_modules(plugin_dirs):
        spec = finder.find_spec(mod_name)
        mod = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(mod)
        # XXX Fix 1
        sys.modules[mod_name] = mod
        modules.append(mod)
    return modules
def init():
    """Instantiate one object per class found in every plugin module.

    We split the loading of the modules from the initialization of the
    plugins so the loader can be reused later (by the child process);
    nothing else changed with respect to the naive version.
    """
    instances = []
    for mod in loader():
        # Only the classes of each module are of interest.
        for _name, cls in inspect.getmembers(mod, inspect.isclass):
            print(cls.__module__)
            instances.append(cls())
    return instances
def trampoline(target_str):
    """Entry point executed in the child process.

    (Re)load the plugin modules first, then unpickle and invoke the real
    target: by the time unpickling happens, the plugin code is already
    importable, so the reference carried in target_str can be resolved.
    """
    loader()
    callee = red.ForkingPickler.loads(target_str)
    return callee()
def try_spawn(obj):
    """Execute obj.sayhi in a spawned child process via the trampoline.

    We serialize obj.sayhi exactly as multiprocessing would do it (the
    same mechanism could also carry the serialized arguments for the
    call). Instead of executing obj.sayhi directly, the child executes
    trampoline(), which (re)loads the plugin modules before unpickling
    and running the real target.
    """
    payload = bytes(red.ForkingPickler.dumps(obj.sayhi))
    worker = multiprocessing.Process(target=trampoline, args=(payload,))
    worker.start()
    worker.join()
if __name__ == '__main__':
    # Use "spawn" so each child starts as a fresh interpreter and must
    # rely on the trampoline to reload the plugin modules.
    multiprocessing.set_start_method('spawn')
    plugins = init()
    try_spawn(plugins[0])
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment