Hello! Welcome to the Dask sprints for SciPy 2020.
Here are a few links for getting started contributing:
- Our contributor guide is here. This should cover
import beeline | |
from starlette.exceptions import HTTPException | |
from starlette.datastructures import Headers | |
class HoneycombMiddleware: | |
def __init__(self, app): | |
self.app = app |
import pickle | |
import struct | |
class Memo(object): | |
def __init__(self): | |
self.kv = {} | |
def put(self, item): | |
ind = self.kv[id(item)] = len(self.kv) |
import os | |
import graphviz | |
from dask.optimize import key_split | |
from dask.dot import _get_display_cls | |
from dask.core import get_dependencies | |
def node_key(s): | |
if isinstance(s, tuple): |
import typing | |
import inspect | |
def _extract_attributes(bases, attrs): | |
arg_fields = {} | |
kwarg_fields = {} | |
existing_slots = set() | |
# Walk up the bases, validating and merging defaults |
In [3]: from typing import NamedTuple | |
In [4]: from quickle import Struct | |
In [5]: from dataclasses import dataclass | |
In [6]: class PointTuple(NamedTuple): | |
...: x: int | |
...: y: int | |
...: |
from prefect import Flow | |
from prefect.run_configs import KubernetesRun | |
from prefect.storage import Docker | |
with Flow("kubernetes-example") as flow: | |
# Add tasks to flow here... | |
# Run on Kubernetes with a custom resource configuration | |
flow.run_config = KubernetesRun(cpu_request=2, memory_request="4Gi") | |
# Store the flow in a docker image |
from prefect import Flow | |
from prefect.environments.execution import KubernetesJobEnvironment | |
from prefect.environments.storage import Docker | |
with Flow("kubernetes-example") as flow: | |
# Add tasks to flow here... | |
# Run on Kubernetes using a custom job specification | |
# This was needed to do even simple things like increase | |
# the job resource limits |