@mhash1m
Created May 28, 2020 13:42
Log after clearing cache and running CI tests
hashim@hashim-ThinkPad-W520:~/dffml$ sudo docker run --rm -ti -u $(id -u):$(id -g) -e LOGGING=debug -e USER=$USER -v $HOME/.cache/pip:/home/$USER/.cache/pip -w /usr/src/dffml -v $PWD:/usr/src/dffml -w /usr/src/dffml --entrypoint .ci/docker-entrypoint.sh python:3.7 .
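For readability, here is the same docker invocation as above, split across lines with backslash continuations. It is functionally identical to the single-line command in the log; the only change is that the duplicated -w /usr/src/dffml flag is written once, since both occurrences set the same working directory:

sudo docker run --rm -ti \
    -u $(id -u):$(id -g) \
    -e LOGGING=debug \
    -e USER=$USER \
    -v $HOME/.cache/pip:/home/$USER/.cache/pip \
    -v $PWD:/usr/src/dffml \
    -w /usr/src/dffml \
    --entrypoint .ci/docker-entrypoint.sh \
    python:3.7 .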
+ '[' x '!=' x ']'
+ PYTHON=python3
+ '[' xhashim == x ']'
+ echo '#!/usr/bin/env bash'
+ chmod 755 /tmp/cmd.sh
++ mktemp -d
+ export VIRTUAL_ENV_DIR=/tmp/tmp.AwnbGcW8bL
+ VIRTUAL_ENV_DIR=/tmp/tmp.AwnbGcW8bL
+ python3 -m venv /tmp/tmp.AwnbGcW8bL
+ . /tmp/tmp.AwnbGcW8bL/bin/activate
++ deactivate nondestructive
++ '[' -n '' ']'
++ '[' -n '' ']'
++ '[' -n /bin/bash -o -n '' ']'
++ hash -r
++ '[' -n '' ']'
++ unset VIRTUAL_ENV
++ '[' '!' nondestructive = nondestructive ']'
++ VIRTUAL_ENV=/tmp/tmp.AwnbGcW8bL
++ export VIRTUAL_ENV
++ _OLD_VIRTUAL_PATH=/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ PATH=/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ export PATH
++ '[' -n '' ']'
++ '[' -z '' ']'
++ _OLD_VIRTUAL_PS1=
++ '[' 'x(tmp.AwnbGcW8bL) ' '!=' x ']'
++ PS1='(tmp.AwnbGcW8bL) '
++ export PS1
++ '[' -n /bin/bash -o -n '' ']'
++ hash -r
++ mktemp -d
+ export HOME=/tmp/tmp.tPXahPUaJ4
+ HOME=/tmp/tmp.tPXahPUaJ4
+ '[' -d /home/hashim/.cache/pip ']'
+ export PIP_CACHE_DIR=/home/hashim/.cache/pip
+ PIP_CACHE_DIR=/home/hashim/.cache/pip
+ mkdir -p /tmp/tmp.tPXahPUaJ4/.cache
+ mkdir -p /tmp/tmp.tPXahPUaJ4/.local/bin
+ export PATH=/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ PATH=/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ '[' x. == x ']'
+ echo './.ci/run.sh .'
+ source ./.ci/deps.sh .
++ set -ex
++ export PLUGIN=.
++ PLUGIN=.
++ '[' x/home/hashim/.cache/pip '!=' x ']'
++ mkdir -p /home/hashim/.cache/pip
+++ python -c 'import sys; print(f"{sys.version_info.major}{sys.version_info.minor}")'
++ python_version=py37
++ export PATH=/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++ python -c 'import pathlib, os; print(any(map(lambda path: pathlib.Path(path, "conda").is_file(), os.environ.get("PATH", "").split(":"))))'
++ has_conda=False
++ mkdir -p /tmp/tmp.tPXahPUaJ4/.local/bin
++ [[ x. == \x\m\o\d\e\l\/\d\a\a\l\4\p\y ]]
++ [[ x. == \x\m\o\d\e\l\/\v\o\w\p\a\l\W\a\b\b\i\t ]]
++ [[ x. == \x\. ]]
++ [[ False != \T\r\u\e ]]
++ conda_url=https://repo.anaconda.com/miniconda/Miniconda3-py37_4.8.2-Linux-x86_64.sh
++ conda_download=/home/hashim/.cache/pip/condapy37.sh
++ '[' py37 == py37 ']'
++ conda_hash=957d2f0f0701c3d1335e3b39f235d197837ad69a944fa6f5d8ad2c686b69df3b
++ '[' '!' -f /home/hashim/.cache/pip/condapy37.sh ']'
++ curl -L https://repo.anaconda.com/miniconda/Miniconda3-py37_4.8.2-Linux-x86_64.sh -o /home/hashim/.cache/pip/condapy37.sh
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
100 81.1M  100 81.1M    0     0  1704k      0  0:00:48  0:00:48 --:--:-- 1765k
++ sha256sum /home/hashim/.cache/pip/condapy37.sh
++ grep '^957d2f0f0701c3d1335e3b39f235d197837ad69a944fa6f5d8ad2c686b69df3b'
957d2f0f0701c3d1335e3b39f235d197837ad69a944fa6f5d8ad2c686b69df3b /home/hashim/.cache/pip/condapy37.sh
++ bash /home/hashim/.cache/pip/condapy37.sh -b -p /home/hashim/.cache/pip/minicondapy37
PREFIX=/home/hashim/.cache/pip/minicondapy37
Unpacking payload ...
Collecting package metadata (current_repodata.json): done
Solving environment: done
## Package Plan ##
environment location: /home/hashim/.cache/pip/minicondapy37
added / updated specs:
- _libgcc_mutex==0.1=main
- asn1crypto==1.3.0=py37_0
- ca-certificates==2020.1.1=0
- certifi==2019.11.28=py37_0
- cffi==1.14.0=py37h2e261b9_0
- chardet==3.0.4=py37_1003
- conda-package-handling==1.6.0=py37h7b6447c_0
- conda==4.8.2=py37_0
- cryptography==2.8=py37h1ba5d50_0
- idna==2.8=py37_0
- ld_impl_linux-64==2.33.1=h53a641e_7
- libedit==3.1.20181209=hc058e9b_0
- libffi==3.2.1=hd88cf55_4
- libgcc-ng==9.1.0=hdf63c60_0
- libstdcxx-ng==9.1.0=hdf63c60_0
- ncurses==6.2=he6710b0_0
- openssl==1.1.1d=h7b6447c_4
- pip==20.0.2=py37_1
- pycosat==0.6.3=py37h7b6447c_0
- pycparser==2.19=py37_0
- pyopenssl==19.1.0=py37_0
- pysocks==1.7.1=py37_0
- python==3.7.6=h0371630_2
- readline==7.0=h7b6447c_5
- requests==2.22.0=py37_1
- ruamel_yaml==0.15.87=py37h7b6447c_0
- setuptools==45.2.0=py37_0
- six==1.14.0=py37_0
- sqlite==3.31.1=h7b6447c_0
- tk==8.6.8=hbc83047_0
- tqdm==4.42.1=py_0
- urllib3==1.25.8=py37_0
- wheel==0.34.2=py37_0
- xz==5.2.4=h14c3975_4
- yaml==0.1.7=had09818_2
- zlib==1.2.11=h7b6447c_3
The following NEW packages will be INSTALLED:
_libgcc_mutex pkgs/main/linux-64::_libgcc_mutex-0.1-main
asn1crypto pkgs/main/linux-64::asn1crypto-1.3.0-py37_0
ca-certificates pkgs/main/linux-64::ca-certificates-2020.1.1-0
certifi pkgs/main/linux-64::certifi-2019.11.28-py37_0
cffi pkgs/main/linux-64::cffi-1.14.0-py37h2e261b9_0
chardet pkgs/main/linux-64::chardet-3.0.4-py37_1003
conda pkgs/main/linux-64::conda-4.8.2-py37_0
conda-package-han~ pkgs/main/linux-64::conda-package-handling-1.6.0-py37h7b6447c_0
cryptography pkgs/main/linux-64::cryptography-2.8-py37h1ba5d50_0
idna pkgs/main/linux-64::idna-2.8-py37_0
ld_impl_linux-64 pkgs/main/linux-64::ld_impl_linux-64-2.33.1-h53a641e_7
libedit pkgs/main/linux-64::libedit-3.1.20181209-hc058e9b_0
libffi pkgs/main/linux-64::libffi-3.2.1-hd88cf55_4
libgcc-ng pkgs/main/linux-64::libgcc-ng-9.1.0-hdf63c60_0
libstdcxx-ng pkgs/main/linux-64::libstdcxx-ng-9.1.0-hdf63c60_0
ncurses pkgs/main/linux-64::ncurses-6.2-he6710b0_0
openssl pkgs/main/linux-64::openssl-1.1.1d-h7b6447c_4
pip pkgs/main/linux-64::pip-20.0.2-py37_1
pycosat pkgs/main/linux-64::pycosat-0.6.3-py37h7b6447c_0
pycparser pkgs/main/linux-64::pycparser-2.19-py37_0
pyopenssl pkgs/main/linux-64::pyopenssl-19.1.0-py37_0
pysocks pkgs/main/linux-64::pysocks-1.7.1-py37_0
python pkgs/main/linux-64::python-3.7.6-h0371630_2
readline pkgs/main/linux-64::readline-7.0-h7b6447c_5
requests pkgs/main/linux-64::requests-2.22.0-py37_1
ruamel_yaml pkgs/main/linux-64::ruamel_yaml-0.15.87-py37h7b6447c_0
setuptools pkgs/main/linux-64::setuptools-45.2.0-py37_0
six pkgs/main/linux-64::six-1.14.0-py37_0
sqlite pkgs/main/linux-64::sqlite-3.31.1-h7b6447c_0
tk pkgs/main/linux-64::tk-8.6.8-hbc83047_0
tqdm pkgs/main/noarch::tqdm-4.42.1-py_0
urllib3 pkgs/main/linux-64::urllib3-1.25.8-py37_0
wheel pkgs/main/linux-64::wheel-0.34.2-py37_0
xz pkgs/main/linux-64::xz-5.2.4-h14c3975_4
yaml pkgs/main/linux-64::yaml-0.1.7-had09818_2
zlib pkgs/main/linux-64::zlib-1.2.11-h7b6447c_3
Preparing transaction: done
Executing transaction: done
installation finished.
++ conda update -y -n base -c defaults conda
Collecting package metadata (current_repodata.json): done
Solving environment: done
## Package Plan ##
environment location: /home/hashim/.cache/pip/minicondapy37
added / updated specs:
- conda
The following packages will be downloaded:
package | build
---------------------------|-----------------
certifi-2020.4.5.1 | py37_0 155 KB
conda-4.8.3 | py37_0 2.8 MB
conda-package-handling-1.6.1| py37h7b6447c_0 798 KB
cryptography-2.9.2 | py37h1ba5d50_0 552 KB
idna-2.9 | py_1 49 KB
ncurses-6.2 | he6710b0_1 817 KB
openssl-1.1.1g | h7b6447c_0 2.5 MB
pip-20.0.2 | py37_3 1.7 MB
pycparser-2.20 | py_0 92 KB
requests-2.23.0 | py37_0 92 KB
setuptools-46.4.0 | py37_0 514 KB
sqlite-3.31.1 | h62c20be_1 1.1 MB
tqdm-4.46.0 | py_0 60 KB
xz-5.2.5 | h7b6447c_0 341 KB
------------------------------------------------------------
Total: 11.5 MB
The following packages will be REMOVED:
asn1crypto-1.3.0-py37_0
The following packages will be UPDATED:
certifi 2019.11.28-py37_0 --> 2020.4.5.1-py37_0
conda 4.8.2-py37_0 --> 4.8.3-py37_0
conda-package-han~ 1.6.0-py37h7b6447c_0 --> 1.6.1-py37h7b6447c_0
cryptography 2.8-py37h1ba5d50_0 --> 2.9.2-py37h1ba5d50_0
idna pkgs/main/linux-64::idna-2.8-py37_0 --> pkgs/main/noarch::idna-2.9-py_1
ncurses 6.2-he6710b0_0 --> 6.2-he6710b0_1
openssl 1.1.1d-h7b6447c_4 --> 1.1.1g-h7b6447c_0
pip 20.0.2-py37_1 --> 20.0.2-py37_3
pycparser pkgs/main/linux-64::pycparser-2.19-py~ --> pkgs/main/noarch::pycparser-2.20-py_0
requests 2.22.0-py37_1 --> 2.23.0-py37_0
setuptools 45.2.0-py37_0 --> 46.4.0-py37_0
sqlite 3.31.1-h7b6447c_0 --> 3.31.1-h62c20be_1
tqdm 4.42.1-py_0 --> 4.46.0-py_0
xz 5.2.4-h14c3975_4 --> 5.2.5-h7b6447c_0
Downloading and Extracting Packages
ncurses-6.2 | 817 KB | #################################### | 100%
conda-package-handli | 798 KB | #################################### | 100%
idna-2.9 | 49 KB | #################################### | 100%
pycparser-2.20 | 92 KB | #################################### | 100%
certifi-2020.4.5.1 | 155 KB | #################################### | 100%
requests-2.23.0 | 92 KB | #################################### | 100%
setuptools-46.4.0 | 514 KB | #################################### | 100%
pip-20.0.2 | 1.7 MB | #################################### | 100%
tqdm-4.46.0 | 60 KB | #################################### | 100%
xz-5.2.5 | 341 KB | #################################### | 100%
openssl-1.1.1g | 2.5 MB | #################################### | 100%
conda-4.8.3 | 2.8 MB | #################################### | 100%
cryptography-2.9.2 | 552 KB | #################################### | 100%
sqlite-3.31.1 | 1.1 MB | #################################### | 100%
Preparing transaction: done
Verifying transaction: done
Executing transaction: done
++ conda config --add channels anaconda
++ conda config --add channels conda-forge
++ '[' -f /home/hashim/.cache/pip/minicondapy37/bin/activate ']'
++ source /home/hashim/.cache/pip/minicondapy37/bin/activate base
+++ _CONDA_ROOT=/home/hashim/.cache/pip/minicondapy37
+++ . /home/hashim/.cache/pip/minicondapy37/etc/profile.d/conda.sh
++++ export CONDA_EXE=/home/hashim/.cache/pip/minicondapy37/bin/conda
++++ CONDA_EXE=/home/hashim/.cache/pip/minicondapy37/bin/conda
++++ export _CE_M=
++++ _CE_M=
++++ export _CE_CONDA=
++++ _CE_CONDA=
++++ export CONDA_PYTHON_EXE=/home/hashim/.cache/pip/minicondapy37/bin/python
++++ CONDA_PYTHON_EXE=/home/hashim/.cache/pip/minicondapy37/bin/python
++++ '[' -z '' ']'
++++ export CONDA_SHLVL=0
++++ CONDA_SHLVL=0
++++ '[' -n x ']'
++++ '[' -n '' ']'
++++++ dirname /home/hashim/.cache/pip/minicondapy37/bin/conda
+++++ dirname /home/hashim/.cache/pip/minicondapy37/bin
++++ PATH=/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++++ export PATH
++++ '[' -z x ']'
+++ conda activate base
+++ '[' 2 -lt 1 ']'
+++ local cmd=activate
+++ shift
+++ case "$cmd" in
+++ __conda_activate activate base
+++ '[' -n '' ']'
+++ local cmd=activate
+++ shift
+++ local ask_conda
+++ CONDA_INTERNAL_OLDPATH=/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++ __add_sys_prefix_to_path
+++ '[' -n '' ']'
++++ dirname /home/hashim/.cache/pip/minicondapy37/bin/conda
+++ SYSP=/home/hashim/.cache/pip/minicondapy37/bin
++++ dirname /home/hashim/.cache/pip/minicondapy37/bin
+++ SYSP=/home/hashim/.cache/pip/minicondapy37
+++ '[' -n '' ']'
+++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++ export PATH
++++ PS1='(tmp.AwnbGcW8bL) '
++++ /home/hashim/.cache/pip/minicondapy37/bin/conda shell.posix activate base
+++ ask_conda='PS1='\''(base) (tmp.AwnbGcW8bL) '\''
export PATH='\''/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'\''
export CONDA_PREFIX='\''/home/hashim/.cache/pip/minicondapy37'\''
export CONDA_SHLVL='\''1'\''
export CONDA_DEFAULT_ENV='\''base'\''
export CONDA_PROMPT_MODIFIER='\''(base) '\''
export CONDA_EXE='\''/home/hashim/.cache/pip/minicondapy37/bin/conda'\''
export _CE_M='\'''\''
export _CE_CONDA='\'''\''
export CONDA_PYTHON_EXE='\''/home/hashim/.cache/pip/minicondapy37/bin/python'\'''
+++ rc=0
+++ PATH=/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++ eval 'PS1='\''(base) (tmp.AwnbGcW8bL) '\''
export PATH='\''/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'\''
export CONDA_PREFIX='\''/home/hashim/.cache/pip/minicondapy37'\''
export CONDA_SHLVL='\''1'\''
export CONDA_DEFAULT_ENV='\''base'\''
export CONDA_PROMPT_MODIFIER='\''(base) '\''
export CONDA_EXE='\''/home/hashim/.cache/pip/minicondapy37/bin/conda'\''
export _CE_M='\'''\''
export _CE_CONDA='\'''\''
export CONDA_PYTHON_EXE='\''/home/hashim/.cache/pip/minicondapy37/bin/python'\'''
++++ PS1='(base) (tmp.AwnbGcW8bL) '
++++ export PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++++ export CONDA_PREFIX=/home/hashim/.cache/pip/minicondapy37
++++ CONDA_PREFIX=/home/hashim/.cache/pip/minicondapy37
++++ export CONDA_SHLVL=1
++++ CONDA_SHLVL=1
++++ export CONDA_DEFAULT_ENV=base
++++ CONDA_DEFAULT_ENV=base
++++ export 'CONDA_PROMPT_MODIFIER=(base) '
++++ CONDA_PROMPT_MODIFIER='(base) '
++++ export CONDA_EXE=/home/hashim/.cache/pip/minicondapy37/bin/conda
++++ CONDA_EXE=/home/hashim/.cache/pip/minicondapy37/bin/conda
++++ export _CE_M=
++++ _CE_M=
++++ export _CE_CONDA=
++++ _CE_CONDA=
++++ export CONDA_PYTHON_EXE=/home/hashim/.cache/pip/minicondapy37/bin/python
++++ CONDA_PYTHON_EXE=/home/hashim/.cache/pip/minicondapy37/bin/python
+++ '[' 0 '!=' 0 ']'
+++ __conda_hashr
+++ '[' -n '' ']'
+++ '[' -n '' ']'
+++ hash -r
++ python -m pip install --upgrade pip setuptools twine
Collecting pip
Downloading pip-20.1.1-py2.py3-none-any.whl (1.5 MB)
|████████████████████████████████| 1.5 MB 967 kB/s
Collecting setuptools
Downloading setuptools-47.1.0-py3-none-any.whl (583 kB)
|████████████████████████████████| 583 kB 204 kB/s
Collecting twine
Downloading twine-3.1.1-py3-none-any.whl (36 kB)
Requirement already satisfied, skipping upgrade: tqdm>=4.14 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine) (4.46.0)
Collecting readme-renderer>=21.0
Downloading readme_renderer-26.0-py2.py3-none-any.whl (15 kB)
Requirement already satisfied, skipping upgrade: requests>=2.20 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine) (2.23.0)
Collecting requests-toolbelt!=0.9.0,>=0.8.0
Downloading requests_toolbelt-0.9.1-py2.py3-none-any.whl (54 kB)
|████████████████████████████████| 54 kB 179 kB/s
Collecting keyring>=15.1
Downloading keyring-21.2.1-py3-none-any.whl (31 kB)
Collecting pkginfo>=1.4.2
Downloading pkginfo-1.5.0.1-py2.py3-none-any.whl (25 kB)
Collecting importlib-metadata; python_version < "3.8"
Downloading importlib_metadata-1.6.0-py2.py3-none-any.whl (30 kB)
Collecting docutils>=0.13.1
Downloading docutils-0.16-py2.py3-none-any.whl (548 kB)
|████████████████████████████████| 548 kB 146 kB/s
Collecting Pygments>=2.5.1
Downloading Pygments-2.6.1-py3-none-any.whl (914 kB)
|████████████████████████████████| 914 kB 242 kB/s
Requirement already satisfied, skipping upgrade: six in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from readme-renderer>=21.0->twine) (1.14.0)
Collecting bleach>=2.1.0
Downloading bleach-3.1.5-py2.py3-none-any.whl (151 kB)
|████████████████████████████████| 151 kB 211 kB/s
Requirement already satisfied, skipping upgrade: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from requests>=2.20->twine) (1.25.8)
Requirement already satisfied, skipping upgrade: certifi>=2017.4.17 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from requests>=2.20->twine) (2020.4.5.1)
Requirement already satisfied, skipping upgrade: idna<3,>=2.5 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from requests>=2.20->twine) (2.9)
Requirement already satisfied, skipping upgrade: chardet<4,>=3.0.2 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from requests>=2.20->twine) (3.0.4)
Collecting jeepney>=0.4.2; sys_platform == "linux"
Downloading jeepney-0.4.3-py3-none-any.whl (21 kB)
Collecting SecretStorage>=3; sys_platform == "linux"
Downloading SecretStorage-3.1.2-py3-none-any.whl (14 kB)
Collecting zipp>=0.5
Downloading zipp-3.1.0-py3-none-any.whl (4.9 kB)
Collecting packaging
Downloading packaging-20.4-py2.py3-none-any.whl (37 kB)
Collecting webencodings
Downloading webencodings-0.5.1-py2.py3-none-any.whl (11 kB)
Requirement already satisfied, skipping upgrade: cryptography in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from SecretStorage>=3; sys_platform == "linux"->keyring>=15.1->twine) (2.9.2)
Collecting pyparsing>=2.0.2
Downloading pyparsing-2.4.7-py2.py3-none-any.whl (67 kB)
|████████████████████████████████| 67 kB 145 kB/s
Requirement already satisfied, skipping upgrade: cffi!=1.11.3,>=1.8 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from cryptography->SecretStorage>=3; sys_platform == "linux"->keyring>=15.1->twine) (1.14.0)
Requirement already satisfied, skipping upgrade: pycparser in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from cffi!=1.11.3,>=1.8->cryptography->SecretStorage>=3; sys_platform == "linux"->keyring>=15.1->twine) (2.20)
Installing collected packages: pip, setuptools, docutils, Pygments, pyparsing, packaging, webencodings, bleach, readme-renderer, requests-toolbelt, jeepney, zipp, importlib-metadata, SecretStorage, keyring, pkginfo, twine
Attempting uninstall: pip
Found existing installation: pip 20.0.2
Uninstalling pip-20.0.2:
Successfully uninstalled pip-20.0.2
Attempting uninstall: setuptools
Found existing installation: setuptools 46.4.0.post20200518
Uninstalling setuptools-46.4.0.post20200518:
Successfully uninstalled setuptools-46.4.0.post20200518
Successfully installed Pygments-2.6.1 SecretStorage-3.1.2 bleach-3.1.5 docutils-0.16 importlib-metadata-1.6.0 jeepney-0.4.3 keyring-21.2.1 packaging-20.4 pip-20.1.1 pkginfo-1.5.0.1 pyparsing-2.4.7 readme-renderer-26.0 requests-toolbelt-0.9.1 setuptools-47.1.0 twine-3.1.1 webencodings-0.5.1 zipp-3.1.0
++ pip install -U -e '.[dev]'
Obtaining file:///usr/src/dffml
Installing build dependencies ... done
Getting requirements to build wheel ... done
Preparing wheel metadata ... done
Collecting jsbeautifier; extra == "dev"
Downloading jsbeautifier-1.11.0.tar.gz (69 kB)
|████████████████████████████████| 69 kB 204 kB/s
Collecting coverage; extra == "dev"
Downloading coverage-5.1-cp37-cp37m-manylinux1_x86_64.whl (227 kB)
|████████████████████████████████| 227 kB 616 kB/s
Collecting black; extra == "dev"
Downloading black-19.10b0-py36-none-any.whl (97 kB)
|████████████████████████████████| 97 kB 1.2 MB/s
Requirement already satisfied, skipping upgrade: twine; extra == "dev" in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from dffml==0.3.7) (3.1.1)
Collecting codecov; extra == "dev"
Downloading codecov-2.1.3.tar.gz (18 kB)
Collecting sphinx-rtd-theme; extra == "dev"
Downloading sphinx_rtd_theme-0.4.3-py2.py3-none-any.whl (6.4 MB)
|████████████████████████████████| 6.4 MB 801 kB/s
Collecting recommonmark; extra == "dev"
Downloading recommonmark-0.6.0-py2.py3-none-any.whl (10 kB)
Collecting sphinx; extra == "dev"
Downloading Sphinx-3.0.4-py3-none-any.whl (2.8 MB)
|████████████████████████████████| 2.8 MB 1.7 MB/s
Requirement already satisfied, skipping upgrade: six>=1.13.0 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from jsbeautifier; extra == "dev"->dffml==0.3.7) (1.14.0)
Collecting editorconfig>=0.12.2
Downloading EditorConfig-0.12.2.tar.gz (13 kB)
Collecting attrs>=18.1.0
Downloading attrs-19.3.0-py2.py3-none-any.whl (39 kB)
Collecting regex
Downloading regex-2020.5.14-cp37-cp37m-manylinux2010_x86_64.whl (675 kB)
|████████████████████████████████| 675 kB 1.6 MB/s
Collecting pathspec<1,>=0.6
Downloading pathspec-0.8.0-py2.py3-none-any.whl (28 kB)
Collecting click>=6.5
Downloading click-7.1.2-py2.py3-none-any.whl (82 kB)
|████████████████████████████████| 82 kB 481 kB/s
Collecting toml>=0.9.4
Downloading toml-0.10.1-py2.py3-none-any.whl (19 kB)
Collecting appdirs
Downloading appdirs-1.4.4-py2.py3-none-any.whl (9.6 kB)
Collecting typed-ast>=1.4.0
Downloading typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl (737 kB)
|████████████████████████████████| 737 kB 1.9 MB/s
Requirement already satisfied, skipping upgrade: importlib-metadata; python_version < "3.8" in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine; extra == "dev"->dffml==0.3.7) (1.6.0)
Requirement already satisfied, skipping upgrade: requests-toolbelt!=0.9.0,>=0.8.0 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine; extra == "dev"->dffml==0.3.7) (0.9.1)
Requirement already satisfied, skipping upgrade: keyring>=15.1 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine; extra == "dev"->dffml==0.3.7) (21.2.1)
Requirement already satisfied, skipping upgrade: readme-renderer>=21.0 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine; extra == "dev"->dffml==0.3.7) (26.0)
Requirement already satisfied, skipping upgrade: pkginfo>=1.4.2 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine; extra == "dev"->dffml==0.3.7) (1.5.0.1)
Requirement already satisfied, skipping upgrade: tqdm>=4.14 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine; extra == "dev"->dffml==0.3.7) (4.46.0)
Requirement already satisfied, skipping upgrade: requests>=2.20 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine; extra == "dev"->dffml==0.3.7) (2.23.0)
Requirement already satisfied, skipping upgrade: setuptools>=0.7.0 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from twine; extra == "dev"->dffml==0.3.7) (47.1.0)
Collecting commonmark>=0.8.1
Downloading commonmark-0.9.1-py2.py3-none-any.whl (51 kB)
|████████████████████████████████| 51 kB 109 kB/s
Requirement already satisfied, skipping upgrade: docutils>=0.11 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from recommonmark; extra == "dev"->dffml==0.3.7) (0.16)
Collecting babel>=1.3
Downloading Babel-2.8.0-py2.py3-none-any.whl (8.6 MB)
|████████████████████████████████| 8.6 MB 886 kB/s
Collecting sphinxcontrib-devhelp
Downloading sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl (84 kB)
|████████████████████████████████| 84 kB 822 kB/s
Collecting alabaster<0.8,>=0.7
Downloading alabaster-0.7.12-py2.py3-none-any.whl (14 kB)
Collecting sphinxcontrib-applehelp
Downloading sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl (121 kB)
|████████████████████████████████| 121 kB 700 kB/s
Collecting snowballstemmer>=1.1
Downloading snowballstemmer-2.0.0-py2.py3-none-any.whl (97 kB)
|████████████████████████████████| 97 kB 850 kB/s
Requirement already satisfied, skipping upgrade: Pygments>=2.0 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from sphinx; extra == "dev"->dffml==0.3.7) (2.6.1)
Collecting sphinxcontrib-jsmath
Downloading sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl (5.1 kB)
Collecting sphinxcontrib-serializinghtml
Downloading sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl (89 kB)
|████████████████████████████████| 89 kB 861 kB/s
Collecting sphinxcontrib-qthelp
Downloading sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl (90 kB)
|████████████████████████████████| 90 kB 749 kB/s
Requirement already satisfied, skipping upgrade: packaging in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from sphinx; extra == "dev"->dffml==0.3.7) (20.4)
Collecting imagesize
Downloading imagesize-1.2.0-py2.py3-none-any.whl (4.8 kB)
Collecting Jinja2>=2.3
Downloading Jinja2-2.11.2-py2.py3-none-any.whl (125 kB)
|████████████████████████████████| 125 kB 722 kB/s
Collecting sphinxcontrib-htmlhelp
Downloading sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl (96 kB)
|████████████████████████████████| 96 kB 950 kB/s
Requirement already satisfied, skipping upgrade: zipp>=0.5 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from importlib-metadata; python_version < "3.8"->twine; extra == "dev"->dffml==0.3.7) (3.1.0)
Requirement already satisfied, skipping upgrade: jeepney>=0.4.2; sys_platform == "linux" in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from keyring>=15.1->twine; extra == "dev"->dffml==0.3.7) (0.4.3)
Requirement already satisfied, skipping upgrade: SecretStorage>=3; sys_platform == "linux" in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from keyring>=15.1->twine; extra == "dev"->dffml==0.3.7) (3.1.2)
Requirement already satisfied, skipping upgrade: bleach>=2.1.0 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from readme-renderer>=21.0->twine; extra == "dev"->dffml==0.3.7) (3.1.5)
Requirement already satisfied, skipping upgrade: urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from requests>=2.20->twine; extra == "dev"->dffml==0.3.7) (1.25.8)
Requirement already satisfied, skipping upgrade: chardet<4,>=3.0.2 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from requests>=2.20->twine; extra == "dev"->dffml==0.3.7) (3.0.4)
Requirement already satisfied, skipping upgrade: certifi>=2017.4.17 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from requests>=2.20->twine; extra == "dev"->dffml==0.3.7) (2020.4.5.1)
Requirement already satisfied, skipping upgrade: idna<3,>=2.5 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from requests>=2.20->twine; extra == "dev"->dffml==0.3.7) (2.9)
Collecting pytz>=2015.7
Downloading pytz-2020.1-py2.py3-none-any.whl (510 kB)
|████████████████████████████████| 510 kB 945 kB/s
Requirement already satisfied, skipping upgrade: pyparsing>=2.0.2 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from packaging->sphinx; extra == "dev"->dffml==0.3.7) (2.4.7)
Collecting MarkupSafe>=0.23
Downloading MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl (27 kB)
Requirement already satisfied, skipping upgrade: cryptography in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from SecretStorage>=3; sys_platform == "linux"->keyring>=15.1->twine; extra == "dev"->dffml==0.3.7) (2.9.2)
Requirement already satisfied, skipping upgrade: webencodings in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from bleach>=2.1.0->readme-renderer>=21.0->twine; extra == "dev"->dffml==0.3.7) (0.5.1)
Requirement already satisfied, skipping upgrade: cffi!=1.11.3,>=1.8 in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from cryptography->SecretStorage>=3; sys_platform == "linux"->keyring>=15.1->twine; extra == "dev"->dffml==0.3.7) (1.14.0)
Requirement already satisfied, skipping upgrade: pycparser in /home/hashim/.cache/pip/minicondapy37/lib/python3.7/site-packages (from cffi!=1.11.3,>=1.8->cryptography->SecretStorage>=3; sys_platform == "linux"->keyring>=15.1->twine; extra == "dev"->dffml==0.3.7) (2.20)
Building wheels for collected packages: jsbeautifier, codecov, editorconfig
Building wheel for jsbeautifier (setup.py) ... done
Created wheel for jsbeautifier: filename=jsbeautifier-1.11.0-py3-none-any.whl size=88946 sha256=6c675843a31b051bb2c976f8e623d5a1865b04048ee6c99f166daea002ba0a02
Stored in directory: /home/hashim/.cache/pip/wheels/e2/82/51/b89c5d2bfd802be3ff6ba0007d299aa22bbba515d299df6801
Building wheel for codecov (setup.py) ... done
Created wheel for codecov: filename=codecov-2.1.3-py2.py3-none-any.whl size=15843 sha256=1dc42f3e5bf0c70c13deedeebd18cd18f502eac6335cc5a4bf091cea000aad17
Stored in directory: /home/hashim/.cache/pip/wheels/b3/97/13/8750a08228b412eeac6e5427f3e23681cdfb3bbe82320db1c6
Building wheel for editorconfig (setup.py) ... done
Created wheel for editorconfig: filename=EditorConfig-0.12.2-py3-none-any.whl size=15964 sha256=431bc02f44a7ab51c173d7489d3f467cf798128cfa4c427fdb4e2fe5170c371a
Stored in directory: /home/hashim/.cache/pip/wheels/76/50/c3/434552a16b269fa7e5050ab1aeffad8ab2ba3cf87b3a8dcc4c
Successfully built jsbeautifier codecov editorconfig
Installing collected packages: editorconfig, jsbeautifier, coverage, attrs, regex, pathspec, click, toml, appdirs, typed-ast, black, codecov, pytz, babel, sphinxcontrib-devhelp, alabaster, sphinxcontrib-applehelp, snowballstemmer, sphinxcontrib-jsmath, sphinxcontrib-serializinghtml, sphinxcontrib-qthelp, imagesize, MarkupSafe, Jinja2, sphinxcontrib-htmlhelp, sphinx, sphinx-rtd-theme, commonmark, recommonmark, dffml
Running setup.py develop for dffml
Successfully installed Jinja2-2.11.2 MarkupSafe-1.1.1 alabaster-0.7.12 appdirs-1.4.4 attrs-19.3.0 babel-2.8.0 black-19.10b0 click-7.1.2 codecov-2.1.3 commonmark-0.9.1 coverage-5.1 dffml editorconfig-0.12.2 imagesize-1.2.0 jsbeautifier-1.11.0 pathspec-0.8.0 pytz-2020.1 recommonmark-0.6.0 regex-2020.5.14 snowballstemmer-2.0.0 sphinx-3.0.4 sphinx-rtd-theme-0.4.3 sphinxcontrib-applehelp-1.0.2 sphinxcontrib-devhelp-1.0.2 sphinxcontrib-htmlhelp-1.0.3 sphinxcontrib-jsmath-1.0.1 sphinxcontrib-qthelp-1.0.3 sphinxcontrib-serializinghtml-1.1.4 toml-0.10.1 typed-ast-1.4.1
++ [[ x. == \x\f\e\a\t\u\r\e\/\g\i\t ]]
++ [[ x. == \x\o\p\e\r\a\t\i\o\n\s\/\d\e\p\l\o\y ]]
++ '[' . == source/mysql ']'
++ [[ x. == \x\m\o\d\e\l\/\v\o\w\p\a\l\W\a\b\b\i\t ]]
++ [[ x. == \x\. ]]
++ conda install -y -c conda-forge vowpalwabbit
++ '[' 5 -lt 1 ']'
++ local cmd=install
++ shift
++ case "$cmd" in
++ CONDA_INTERNAL_OLDPATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ __add_sys_prefix_to_path
++ '[' -n '' ']'
+++ dirname /home/hashim/.cache/pip/minicondapy37/bin/conda
++ SYSP=/home/hashim/.cache/pip/minicondapy37/bin
+++ dirname /home/hashim/.cache/pip/minicondapy37/bin
++ SYSP=/home/hashim/.cache/pip/minicondapy37
++ '[' -n '' ']'
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ export PATH
++ /home/hashim/.cache/pip/minicondapy37/bin/conda install -y -c conda-forge vowpalwabbit
Collecting package metadata (current_repodata.json): done
Solving environment: done
## Package Plan ##
environment location: /home/hashim/.cache/pip/minicondapy37
added / updated specs:
- vowpalwabbit
The following packages will be downloaded:
package | build
---------------------------|-----------------
boost-1.72.0 | py37h9de70de_0 316 KB conda-forge
boost-cpp-1.72.0 | h8e57a91_0 21.8 MB conda-forge
bzip2-1.0.8 | h516909a_2 396 KB conda-forge
ca-certificates-2020.4.5.1 | hecc5488_0 146 KB conda-forge
certifi-2020.4.5.1 | py37hc8dfbb8_0 151 KB conda-forge
conda-4.8.3 | py37hc8dfbb8_1 3.0 MB conda-forge
icu-64.2 | he1b5a44_1 12.6 MB conda-forge
libblas-3.8.0 | 14_openblas 10 KB conda-forge
libcblas-3.8.0 | 14_openblas 10 KB conda-forge
libgfortran-ng-7.5.0 | hdf63c60_6 1.7 MB conda-forge
liblapack-3.8.0 | 14_openblas 10 KB conda-forge
libopenblas-0.3.7 | h5ec1e0e_6 7.6 MB conda-forge
numpy-1.18.4 | py37h8960a57_0 5.2 MB conda-forge
openssl-1.1.1g | h516909a_0 2.1 MB conda-forge
python_abi-3.7 | 1_cp37m 4 KB conda-forge
vowpalwabbit-8.8.1 | py37hff0ad0e_1 1.4 MB conda-forge
------------------------------------------------------------
Total: 56.6 MB
The following NEW packages will be INSTALLED:
boost conda-forge/linux-64::boost-1.72.0-py37h9de70de_0
boost-cpp conda-forge/linux-64::boost-cpp-1.72.0-h8e57a91_0
bzip2 conda-forge/linux-64::bzip2-1.0.8-h516909a_2
icu conda-forge/linux-64::icu-64.2-he1b5a44_1
libblas conda-forge/linux-64::libblas-3.8.0-14_openblas
libcblas conda-forge/linux-64::libcblas-3.8.0-14_openblas
libgfortran-ng conda-forge/linux-64::libgfortran-ng-7.5.0-hdf63c60_6
liblapack conda-forge/linux-64::liblapack-3.8.0-14_openblas
libopenblas conda-forge/linux-64::libopenblas-0.3.7-h5ec1e0e_6
numpy conda-forge/linux-64::numpy-1.18.4-py37h8960a57_0
python_abi conda-forge/linux-64::python_abi-3.7-1_cp37m
vowpalwabbit conda-forge/linux-64::vowpalwabbit-8.8.1-py37hff0ad0e_1
The following packages will be UPDATED:
ca-certificates pkgs/main::ca-certificates-2020.1.1-0 --> conda-forge::ca-certificates-2020.4.5.1-hecc5488_0
conda pkgs/main::conda-4.8.3-py37_0 --> conda-forge::conda-4.8.3-py37hc8dfbb8_1
The following packages will be SUPERSEDED by a higher-priority channel:
certifi pkgs/main::certifi-2020.4.5.1-py37_0 --> conda-forge::certifi-2020.4.5.1-py37hc8dfbb8_0
openssl pkgs/main::openssl-1.1.1g-h7b6447c_0 --> conda-forge::openssl-1.1.1g-h516909a_0
Downloading and Extracting Packages
liblapack-3.8.0 | 10 KB | #################################### | 100%
ca-certificates-2020 | 146 KB | #################################### | 100%
bzip2-1.0.8 | 396 KB | #################################### | 100%
boost-cpp-1.72.0 | 21.8 MB | #################################### | 100%
libblas-3.8.0 | 10 KB | #################################### | 100%
icu-64.2 | 12.6 MB | #################################### | 100%
python_abi-3.7 | 4 KB | #################################### | 100%
boost-1.72.0 | 316 KB | #################################### | 100%
certifi-2020.4.5.1 | 151 KB | #################################### | 100%
openssl-1.1.1g | 2.1 MB | #################################### | 100%
conda-4.8.3 | 3.0 MB | #################################### | 100%
libcblas-3.8.0 | 10 KB | #################################### | 100%
libopenblas-0.3.7 | 7.6 MB | #################################### | 100%
libgfortran-ng-7.5.0 | 1.7 MB | #################################### | 100%
vowpalwabbit-8.8.1 | 1.4 MB | #################################### | 100%
numpy-1.18.4 | 5.2 MB | #################################### | 100%
Preparing transaction: done
Verifying transaction: done
Executing transaction: done
++ local t1=0
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ '[' 0 = 0 ']'
++ __conda_reactivate
++ local ask_conda
++ CONDA_INTERNAL_OLDPATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ __add_sys_prefix_to_path
++ '[' -n '' ']'
+++ dirname /home/hashim/.cache/pip/minicondapy37/bin/conda
++ SYSP=/home/hashim/.cache/pip/minicondapy37/bin
+++ dirname /home/hashim/.cache/pip/minicondapy37/bin
++ SYSP=/home/hashim/.cache/pip/minicondapy37
++ '[' -n '' ']'
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ export PATH
+++ PS1='(base) (tmp.AwnbGcW8bL) '
+++ /home/hashim/.cache/pip/minicondapy37/bin/conda shell.posix reactivate
++ ask_conda='PS1='\''(base) (tmp.AwnbGcW8bL) '\''
export PATH='\''/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'\''
export CONDA_SHLVL='\''1'\''
export CONDA_PROMPT_MODIFIER='\''(base) '\'''
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ eval 'PS1='\''(base) (tmp.AwnbGcW8bL) '\''
export PATH='\''/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'\''
export CONDA_SHLVL='\''1'\''
export CONDA_PROMPT_MODIFIER='\''(base) '\'''
+++ PS1='(base) (tmp.AwnbGcW8bL) '
+++ export PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++ export CONDA_SHLVL=1
+++ CONDA_SHLVL=1
+++ export 'CONDA_PROMPT_MODIFIER=(base) '
+++ CONDA_PROMPT_MODIFIER='(base) '
++ __conda_hashr
++ '[' -n '' ']'
++ '[' -n '' ']'
++ hash -r
++ [[ x. == \x\m\o\d\e\l\/\d\a\a\l\4\p\y ]]
++ [[ x. == \x\. ]]
++ set +e
++ conda install -y -c intel daal4py
++ '[' 5 -lt 1 ']'
++ local cmd=install
++ shift
++ case "$cmd" in
++ CONDA_INTERNAL_OLDPATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ __add_sys_prefix_to_path
++ '[' -n '' ']'
+++ dirname /home/hashim/.cache/pip/minicondapy37/bin/conda
++ SYSP=/home/hashim/.cache/pip/minicondapy37/bin
+++ dirname /home/hashim/.cache/pip/minicondapy37/bin
++ SYSP=/home/hashim/.cache/pip/minicondapy37
++ '[' -n '' ']'
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ export PATH
++ /home/hashim/.cache/pip/minicondapy37/bin/conda install -y -c intel daal4py
Collecting package metadata (current_repodata.json): done
Solving environment: done
## Package Plan ##
environment location: /home/hashim/.cache/pip/minicondapy37
added / updated specs:
- daal4py
The following packages will be downloaded:
package | build
---------------------------|-----------------
certifi-2019.11.28 | py37_0 147 KB intel
daal-2020.1 | intel_217 66.4 MB intel
daal4py-2020.1 | py37ha68da19_0 12.5 MB intel
impi_rt-2019.7 | intel_217 20.0 MB intel
intelpython-2020.1 | 0 502 KB intel
tbb-2020.2 | intel_217 936 KB intel
------------------------------------------------------------
Total: 100.4 MB
The following NEW packages will be INSTALLED:
daal intel/linux-64::daal-2020.1-intel_217
daal4py intel/linux-64::daal4py-2020.1-py37ha68da19_0
impi_rt intel/linux-64::impi_rt-2019.7-intel_217
intelpython intel/linux-64::intelpython-2020.1-0
tbb intel/linux-64::tbb-2020.2-intel_217
The following packages will be SUPERSEDED by a higher-priority channel:
certifi conda-forge::certifi-2020.4.5.1-py37h~ --> intel::certifi-2019.11.28-py37_0
Downloading and Extracting Packages
certifi-2019.11.28 | 147 KB | #################################### | 100%
daal4py-2020.1 | 12.5 MB | #################################### | 100%
impi_rt-2019.7 | 20.0 MB | #################################### | 100%
tbb-2020.2 | 936 KB | #################################### | 100%
intelpython-2020.1 | 502 KB | #################################### | 100%
daal-2020.1 | 66.4 MB | #################################### | 100%
Preparing transaction: done
Verifying transaction: done
Executing transaction: done
++ local t1=0
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ '[' 0 = 0 ']'
++ __conda_reactivate
++ local ask_conda
++ CONDA_INTERNAL_OLDPATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ __add_sys_prefix_to_path
++ '[' -n '' ']'
+++ dirname /home/hashim/.cache/pip/minicondapy37/bin/conda
++ SYSP=/home/hashim/.cache/pip/minicondapy37/bin
+++ dirname /home/hashim/.cache/pip/minicondapy37/bin
++ SYSP=/home/hashim/.cache/pip/minicondapy37
++ '[' -n '' ']'
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ export PATH
+++ PS1='(base) (tmp.AwnbGcW8bL) '
+++ /home/hashim/.cache/pip/minicondapy37/bin/conda shell.posix reactivate
++ ask_conda='. "/home/hashim/.cache/pip/minicondapy37/etc/conda/deactivate.d/mpivars.deactivate.sh"
PS1='\''(base) (tmp.AwnbGcW8bL) '\''
export PATH='\''/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'\''
export CONDA_SHLVL='\''1'\''
export CONDA_PROMPT_MODIFIER='\''(base) '\''
. "/home/hashim/.cache/pip/minicondapy37/etc/conda/activate.d/mpivars.activate.sh"'
++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++ eval '. "/home/hashim/.cache/pip/minicondapy37/etc/conda/deactivate.d/mpivars.deactivate.sh"
PS1='\''(base) (tmp.AwnbGcW8bL) '\''
export PATH='\''/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin'\''
export CONDA_SHLVL='\''1'\''
export CONDA_PROMPT_MODIFIER='\''(base) '\''
. "/home/hashim/.cache/pip/minicondapy37/etc/conda/activate.d/mpivars.activate.sh"'
+++ . /home/hashim/.cache/pip/minicondapy37/etc/conda/deactivate.d/mpivars.deactivate.sh
++++ '[' '' '!=' 1 ']'
+++++ echo
+++++ sed 's|/home/hashim/.cache/pip/minicondapy37/lib/mpi.jar:\?||'
++++ export CLASSPATH=
++++ CLASSPATH=
+++++ echo
+++++ sed 's|/home/hashim/.cache/pip/minicondapy37/lib/libfabric:\?||'
++++ export LD_LIBRARY_PATH=
++++ LD_LIBRARY_PATH=
+++++ sed 's|/home/hashim/.cache/pip/minicondapy37/lib:\?||'
+++++ echo
++++ export LD_LIBRARY_PATH=
++++ LD_LIBRARY_PATH=
+++++ echo
+++++ sed 's|/home/hashim/.cache/pip/minicondapy37/share/man:\?||'
++++ export MANPATH=
++++ MANPATH=
+++++ echo
+++++ sed 's|/home/hashim/.cache/pip/minicondapy37/lib/libfabric:\?||'
++++ export LIBRARY_PATH=
++++ LIBRARY_PATH=
+++++ echo /home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++++ sed 's|/home/hashim/.cache/pip/minicondapy37/bin/libfabric:||'
++++ export PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++++ which fi_info
++++ FIP=
++++ echo ''
++++ grep -q 'compilers_and_libraries.*mpi'
++++ grep -q inteloneapi/mpi
++++ echo ''
++++ export I_MPI_ROOT=
++++ I_MPI_ROOT=
++++ echo ''
++++ grep -q /home/hashim/.cache/pip/minicondapy37
+++ PS1='(base) (tmp.AwnbGcW8bL) '
+++ export PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++ PATH=/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+++ export CONDA_SHLVL=1
+++ CONDA_SHLVL=1
+++ export 'CONDA_PROMPT_MODIFIER=(base) '
+++ CONDA_PROMPT_MODIFIER='(base) '
+++ . /home/hashim/.cache/pip/minicondapy37/etc/conda/activate.d/mpivars.activate.sh
++++ '[' '' '!=' 1 ']'
++++ export I_MPI_ROOT=/home/hashim/.cache/pip/minicondapy37
++++ I_MPI_ROOT=/home/hashim/.cache/pip/minicondapy37
++++ '[' -z '' ']'
++++ export CLASSPATH=/home/hashim/.cache/pip/minicondapy37/lib/mpi.jar
++++ CLASSPATH=/home/hashim/.cache/pip/minicondapy37/lib/mpi.jar
++++ '[' -z '' ']'
+++++ manpath
++++ export MANPATH=/home/hashim/.cache/pip/minicondapy37/share/man:
++++ MANPATH=/home/hashim/.cache/pip/minicondapy37/share/man:
++++ '[' -z '' ']'
++++ i_mpi_ofi_library_internal=1
++++ case "$i_mpi_ofi_library_internal" in
++++ export PATH=/home/hashim/.cache/pip/minicondapy37/bin/libfabric:/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++++ PATH=/home/hashim/.cache/pip/minicondapy37/bin/libfabric:/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
++++ export LD_LIBRARY_PATH=/home/hashim/.cache/pip/minicondapy37/lib/libfabric:
++++ LD_LIBRARY_PATH=/home/hashim/.cache/pip/minicondapy37/lib/libfabric:
++++ '[' -z '' ']'
++++ export LIBRARY_PATH=/home/hashim/.cache/pip/minicondapy37/lib/libfabric
++++ LIBRARY_PATH=/home/hashim/.cache/pip/minicondapy37/lib/libfabric
++++ export FI_PROVIDER_PATH=/home/hashim/.cache/pip/minicondapy37/lib/libfabric/prov
++++ FI_PROVIDER_PATH=/home/hashim/.cache/pip/minicondapy37/lib/libfabric/prov
++ __conda_hashr
++ '[' -n '' ']'
++ '[' -n '' ']'
++ hash -r
++ echo 'conda install -y -c intel daal4py, exit code: 0'
conda install -y -c intel daal4py, exit code: 0
++ set -e
++ '[' x. == xmodel/tensorflow_hub ']'
++ [[ x. == \x\o\p\e\r\a\t\i\o\n\s\/\d\e\p\l\o\y ]]
+ runit
+ exec /tmp/cmd.sh
+ '[' -d /tmp/tmp.tPXahPUaJ4/.local/bin ']'
+ export PATH=/tmp/tmp.tPXahPUaJ4/.local/bin:/home/hashim/.cache/pip/minicondapy37/bin/libfabric:/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ PATH=/tmp/tmp.tPXahPUaJ4/.local/bin:/home/hashim/.cache/pip/minicondapy37/bin/libfabric:/home/hashim/.cache/pip/minicondapy37/bin:/home/hashim/.cache/pip/minicondapy37/condabin:/home/hashim/.cache/pip/minicondapy37/bin:/tmp/tmp.tPXahPUaJ4/.local/bin:/tmp/tmp.AwnbGcW8bL/bin:/usr/local/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
+ SRC_ROOT=/usr/src/dffml
+ PYTHON=python3
+ '[' x/tmp/tmp.AwnbGcW8bL '!=' x ']'
+ PYTHON=python
+ TEMP_DIRS=()
+ trap cleanup_temp_dirs EXIT
+ '[' x. == xchangelog ']'
+ '[' x. == xwhitespace ']'
+ '[' x. == xstyle ']'
+ '[' x. == xdocs ']'
+ '[' x. == xlines ']'
+ '[' -d . ']'
+ run_plugin .
+ export PLUGIN=.
+ PLUGIN=.
+ '[' x. = xexamples/shouldi ']'
+ cd /usr/src/dffml/.
+ python -m pip install -U -e .
Obtaining file:///usr/src/dffml
Installing build dependencies ... done
Getting requirements to build wheel ... done
Preparing wheel metadata ... done
Installing collected packages: dffml
Attempting uninstall: dffml
Found existing installation: dffml 0.3.7
Uninstalling dffml-0.3.7:
Successfully uninstalled dffml-0.3.7
Running setup.py develop for dffml
Successfully installed dffml
+ python setup.py test
running test
WARNING: Testing via this command is deprecated and will be removed in a future version. Users looking for a generic test entry point independent of test runner are encouraged to use tox.
running egg_info
writing dffml.egg-info/PKG-INFO
writing dependency_links to dffml.egg-info/dependency_links.txt
writing entry points to dffml.egg-info/entry_points.txt
writing requirements to dffml.egg-info/requires.txt
writing top-level names to dffml.egg-info/top_level.txt
reading manifest file 'dffml.egg-info/SOURCES.txt'
reading manifest template 'MANIFEST.in'
writing manifest file 'dffml.egg-info/SOURCES.txt'
running build_ext
test_str (tests.test_accuracy.TestAccuracry) ... ok
test_0_create_table (tests.db.test_sqlite.TestSqlDatabase) ... DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='/tmp/tmpafu7kxsr.db')
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable (`key` INTEGER NOT NULL PRIMARY KEY, `firstName` text, `lastName` text, `age` real)
ok
test_1_set_get (tests.db.test_sqlite.TestSqlDatabase) ... DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='/tmp/tmpafu7kxsr.db')
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.SqliteDatabaseContext:SELECT * FROM myTable
ok
test_2_update (tests.db.test_sqlite.TestSqlDatabase) ... DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='/tmp/tmpafu7kxsr.db')
DEBUG:dffml.SqliteDatabaseContext:UPDATE myTable SET `age` = ? WHERE ((`firstName` = ? ) OR (`lastName` = ? )) AND ((`age` < ? ))
DEBUG:dffml.SqliteDatabaseContext:SELECT `age` FROM myTable WHERE ((`firstName` = ? ))
ok
test_3_remove (tests.db.test_sqlite.TestSqlDatabase) ... DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='/tmp/tmpafu7kxsr.db')
DEBUG:dffml.SqliteDatabaseContext:DELETE FROM myTable WHERE ((`firstName` = ? ))
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName` FROM myTable
ok
test_4_insert_or_update (tests.db.test_sqlite.TestSqlDatabase) ... DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='/tmp/tmpafu7kxsr.db')
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName` ) VALUES( ?, ? )
DEBUG:dffml.SqliteDatabaseContext:UPDATE myTable SET `firstName` = ? WHERE ((`key` = ? ))
DEBUG:dffml.SqliteDatabaseContext:SELECT * FROM myTable
ok
test_create_from_path (tests.df.test_df_create.TestDataflowCreate) ... DEBUG:dffml.cli.dataflow.Create:Setting config = <class 'dffml.configloader.json.JSONConfigLoader'>
DEBUG:dffml.cli.dataflow.Create:Setting log = 20
DEBUG:dffml.cli.dataflow.Create:Setting not_linked = False
DEBUG:dffml.cli.dataflow.Create:Setting operations = ['ops:echo_string', 'get_single']
DEBUG:dffml.cli.dataflow.Create:Setting seed = [(['OutputString'], 'get_single_spec')]
DEBUG:dffml.JSONConfigLoader:BaseConfig()
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf14dbf10>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation ops:echo_string(ops:echo_string) with base config
DEBUG:dffml.EchoStringImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf14dbf10>: ([Input(value=Irregular at magic school, definition=InputString)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=Irregular at magic school, definition=InputString)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 98e115fd55d13b0adfc5f4797e2ccf69e32e7af6af43c776b336c9df4a3eab07
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] ops:echo_string
DEBUG:dffml.MemoryOrchestratorContext:[98e115fd55d13b0adfc5f4797e2ccf69e32e7af6af43c776b336c9df4a3eab07]: dispatch operation: ops:echo_string
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: ops:echo_string
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'input_string': 'Irregular at magic school'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'output_string': 'Irregular at magic school'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['OutputString']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [OutputString]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'OutputString': 'Irregular at magic school'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
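
[editor's note] A rough plain-Python sketch (deliberately not the dffml API) of the two stages visible in this test's log: a PROCESSING operation like ops:echo_string produces a value, and an OUTPUT operation like get_single picks one definition out of everything produced. It glosses over Input/Definition objects and how output_string is redirected to OutputString; the function and dict names are assumptions for illustration.

def echo_string(input_string):          # PROCESSING stage, like ops:echo_string
    return {"OutputString": input_string}

def get_single(pool, spec):             # OUTPUT stage, like get_single
    return {name: pool[name] for name in spec}

pool = {}
pool.update(echo_string("Irregular at magic school"))
print(get_single(pool, ["OutputString"]))
# {'OutputString': 'Irregular at magic school'}
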
test_args (tests.df.test_memory.TestMemoryRedundancyChecker) ... ok
test_config_default_label (tests.df.test_memory.TestMemoryRedundancyChecker) ... DEBUG:dffml.KeyValueStoreWithArguments:KeyValueStoreWithArgumentsConfig(filename='somefile')
ok
test_auto_start (tests.df.test_auto_start.TestAutoStart) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1799650>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation announce(announce) with base config
DEBUG:dffml.AnnounceImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1799650>: ({'testStart': []},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: []
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: testStart
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: announce
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'string_out': 'EXISTS'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['string']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [string]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'string': 'EXISTS'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_gen_auto_start (tests.df.test_async_iter.TestAsyncIter) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1799b90>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_multi(get_multi) with base config
DEBUG:dffml.GetMultiImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation counter(counter) with base config
DEBUG:dffml.CounterImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation echo_num(echo_num) with base config
DEBUG:dffml.EchoNumImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1799b90>: ({'TestCountAutoStart': [Input(value=1, definition=count_start)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=1, definition=count_start)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: TestCountAutoStart
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] counter
DEBUG:dffml.MemoryOrchestratorContext:[TestCountAutoStart]: dispatch operation: counter
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: counter
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'count_start': 1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: <async_generator object counter at 0x7f6cf14b8dd0>
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCountAutoStart]: dispatch operation: echo_num
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCountAutoStart]: dispatch operation: echo_num
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCountAutoStart]: dispatch operation: echo_num
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCountAutoStart]: dispatch operation: echo_num
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCountAutoStart]: dispatch operation: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 2}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 2}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 5}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 5}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 4}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 4}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 3}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 3}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_multi
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['number']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetMulti:output spec: [number]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number': [2, 5, 1, 4, 3]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
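
[editor's note] A minimal asyncio sketch, assuming nothing about dffml internals, of the pattern this test's log shows: an async generator (counter) fans values out, each value is handled by its own task (echo_num), and the tasks finish in no particular order, which is why the gathered 'number' list above is unordered ([2, 5, 1, 4, 3]).

import asyncio

async def counter(count_start, n=5):
    # Async generator yielding count_start .. count_start + n - 1, like the test's counter op.
    for i in range(count_start, count_start + n):
        yield i

async def echo_num(number_in):
    await asyncio.sleep(0)               # yield control, as a real operation would
    return number_in

async def main():
    # One task per yielded value; completion order is not guaranteed.
    tasks = [asyncio.create_task(echo_num(i)) async for i in counter(1)]
    done, _ = await asyncio.wait(tasks)
    print(sorted(t.result() for t in done))   # [1, 2, 3, 4, 5]

asyncio.run(main())
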
test_gen_with_input (tests.df.test_async_iter.TestAsyncIter) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1d6a110>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_multi(get_multi) with base config
DEBUG:dffml.GetMultiImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation counter(counter) with base config
DEBUG:dffml.CounterImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation echo_num(echo_num) with base config
DEBUG:dffml.EchoNumImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1d6a110>: ({'TestCount': [Input(value=1, definition=count_start)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=1, definition=count_start)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: TestCount
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] counter
DEBUG:dffml.MemoryOrchestratorContext:[TestCount]: dispatch operation: counter
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: counter
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'count_start': 1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: <async_generator object counter at 0x7f6cf14cb0e0>
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCount]: dispatch operation: echo_num
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCount]: dispatch operation: echo_num
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCount]: dispatch operation: echo_num
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCount]: dispatch operation: echo_num
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_num
DEBUG:dffml.MemoryOrchestratorContext:[TestCount]: dispatch operation: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 5}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 5}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 2}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 2}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 3}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 3}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_num
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'number_in': 4}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number_out': 4}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=False>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_multi
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['number']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetMulti:output spec: [number]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'number': [1, 5, 2, 3, 4]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_export (tests.test_df.TestDataFlow) ... ok
test_resolve_missing_condition_definition (tests.test_df.TestDataFlow) ... ok
test_resolve_missing_input_output_definition (tests.test_df.TestDataFlow) ... ok
test_get_none (tests.test_df.TestMemoryKeyValueStore) ... DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
ok
test_get_set (tests.test_df.TestMemoryKeyValueStore) ... DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
ok
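
[editor's note] The two key-value store tests above check a very small contract; a hypothetical synchronous sketch of it (the real MemoryKeyValueStore is an async dffml plugin, so names and signatures here are assumptions):

class KeyValueStore:
    def __init__(self):
        self._data = {}

    def get(self, key):
        return self._data.get(key)       # missing key -> None (test_get_none)

    def set(self, key, value):
        self._data[key] = value          # round-trips with get (test_get_set)

store = KeyValueStore()
assert store.get("feed") is None
store.set("feed", b"face")
assert store.get("feed") == b"face"
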
test_contains_false (tests.test_df.TestMemoryOperationImplementationNetwork) ... DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
ok
test_contains_true (tests.test_df.TestMemoryOperationImplementationNetwork) ... DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.AddImplementation:BaseConfig()
ok
test_instantiable_but_not_instantiated (tests.test_df.TestMemoryOperationImplementationNetwork) ... DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
ok
test_not_instantiable (tests.test_df.TestMemoryOperationImplementationNetwork) ... DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryOperationImplementationNetworkContext:OperationImplementation 'add' is not instantiable: 'add' was not found in ()
ok
test_run (tests.test_df.TestMemoryOperationImplementationNetwork) ... DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
ok
test_load (tests.test_df.TestOperation) ... ok
test_load_name_given (tests.test_df.TestOperation) ... ok
test_load (tests.test_df.TestOperationImplementation) ... ok
test_load_failure (tests.test_df.TestOperationImplementation) ... ok
test_load_name_given (tests.test_df.TestOperationImplementation) ... ok
test_run (tests.test_df.TestOrchestrator) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf199f890>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: ({'add 40 and 2': [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)], 'multiply 42 and 10': [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: (<dffml.df.memory.MemoryInputSet object at 0x7f6cf047cfd0>, <dffml.df.memory.MemoryInputSet object at 0x7f6cf047ce90>)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf047cfd0>
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf047ce90>
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: ([Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)], [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)])
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: f5197902abde393a709f6b9a70055f4e31ff62cbab0c1852d8688a347a248534
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 8c2b8fff2d850d6dc974750084aab6c81bc9b05da1663ac36acd2abf811b18c9
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[f5197902abde393a709f6b9a70055f4e31ff62cbab0c1852d8688a347a248534]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[8c2b8fff2d850d6dc974750084aab6c81bc9b05da1663ac36acd2abf811b18c9]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[f5197902abde393a709f6b9a70055f4e31ff62cbab0c1852d8688a347a248534]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[8c2b8fff2d850d6dc974750084aab6c81bc9b05da1663ac36acd2abf811b18c9]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
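
[editor's note] Stripped of the orchestrator, the calculator dataflow this test runs reduces to: parse the line, branch on the verb, and emit a single result. A plain-Python sketch using the exact inputs and outputs from the log (the dffml version does this with parse_line/add/mult operations and conditions such as is_add):

def parse_line(line):
    words = line.split()
    numbers = [int(w) for w in words if w.isdigit()]
    return {"add": words[0] == "add", "mult": words[0] == "multiply", "numbers": numbers}

def calc(line):
    parsed = parse_line(line)
    if parsed["add"]:
        return sum(parsed["numbers"])                 # 40 + 2 -> 42
    if parsed["mult"]:
        result = 1
        for n in parsed["numbers"]:
            result *= n                               # 42 * 10 -> 420
        return result

print(calc("add 40 and 2"), calc("multiply 42 and 10"))   # 42 420
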
test_vaildation_by_op (tests.test_input_validation.TestDefintion) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf047ce90>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation validate_shout_instance(validate_shouts) with base config
DEBUG:dffml.ValidateShoutsImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation echo_shout(echo_shout) with base config
DEBUG:dffml.EchoShoutImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf047ce90>: ({'TestShoutOut': [Input(value=validation_status:, definition=shout_in)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=validation_status:, definition=shout_in)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: TestShoutOut
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] validate_shout_instance
DEBUG:dffml.MemoryOrchestratorContext:[TestShoutOut]: dispatch operation: validate_shout_instance
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: validate_shout_instance
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'shout_in': 'validation_status:'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'shout_in_validated': 'validation_status:_validated'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] echo_shout
DEBUG:dffml.MemoryOrchestratorContext:[TestShoutOut]: dispatch operation: echo_shout
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: echo_shout
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'shout_in': 'validation_status:_validated'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'shout_out': 'validation_status:_validated'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['shout_out']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [shout_out]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'shout_out': 'validation_status:_validated'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
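
[editor's note] A hypothetical two-line sketch of validation-by-operation as the log shows it: the validator rewrites the input before any downstream operation sees it, so echo_shout only ever receives the "_validated" form.

def validate_shouts(shout_in: str) -> str:
    return shout_in + "_validated"

def echo_shout(shout_in: str) -> str:
    return shout_in

print(echo_shout(validate_shouts("validation_status:")))
# validation_status:_validated
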
test_validate (tests.test_input_validation.TestDefintion) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf047cfd0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_circle(get_circle) with base config
DEBUG:dffml.GetCircleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf047cfd0>: ({'area': [Input(value=UNITCIRCLE, definition=shape_name), Input(value=1, definition=radius), Input(value=3.14, definition=pie)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=UNITCIRCLE, definition=shape_name), Input(value=1, definition=radius), Input(value=3.14, definition=pie)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: area
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] get_circle
DEBUG:dffml.MemoryOrchestratorContext:[area]: dispatch operation: get_circle
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: get_circle
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'name': 'UNITCIRCLE', 'radius': 1, 'pie': 3.14}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'shape': {'name': 'UNITCIRCLE', 'radius': 1, 'area': 3.14}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['mapping']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [mapping]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'mapping': {'name': 'UNITCIRCLE', 'radius': 1, 'area': 3.14}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
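
[editor's note] The area logged by get_circle is consistent with area = pie * radius ** 2, so the unit circle with pie approximated as 3.14 gives 3.14. A small sketch with assumed function shape (the real operation also validates its inputs, which the following test checks):

def get_circle(name: str, radius: float, pie: float) -> dict:
    return {"name": name, "radius": radius, "area": pie * radius ** 2}

print(get_circle("UNITCIRCLE", 1, 3.14))
# {'name': 'UNITCIRCLE', 'radius': 1, 'area': 3.14}
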
test_validation_error (tests.test_input_validation.TestDefintion) ... ok
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')
test_0_create (tests.operation.test_sqlite_query.TestSqliteQuery) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf0475390>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query(db_query_create_table) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf0475390>: ({'create': [Input(value=myTable, definition=query_table), Input(value={'key': 'real', 'firstName': 'text', 'lastName': 'text', 'age': 'real'}, definition=query_cols)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 'real', 'firstName': 'text', 'lastName': 'text', 'age': 'real'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query
DEBUG:dffml.MemoryOrchestratorContext:[create]: dispatch operation: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': {'key': 'real', 'firstName': 'text', 'lastName': 'text', 'age': 'real'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable (`key` real, `firstName` text, `lastName` text, `age` real)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_1_insert (tests.operation.test_sqlite_query.TestSqliteQuery) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf04753d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query(db_query_insert) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.DbQueryInsertImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf04753d0>: ({'insert': [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}, definition=query_data)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query
DEBUG:dffml.MemoryOrchestratorContext:[insert]: dispatch operation: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf04753d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query(db_query_insert) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.DbQueryInsertImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf04753d0>: ({'insert': [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Miles', 'age': 37}, definition=query_data)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Miles', 'age': 37}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query
DEBUG:dffml.MemoryOrchestratorContext:[insert]: dispatch operation: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 11, 'firstName': 'John', 'lastName': 'Miles', 'age': 37}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf04753d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query(db_query_insert) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.DbQueryInsertImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf04753d0>: ({'insert': [Input(value=myTable, definition=query_table), Input(value={'key': 12, 'firstName': 'Bill', 'lastName': 'Miles', 'age': 40}, definition=query_data)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 12, 'firstName': 'Bill', 'lastName': 'Miles', 'age': 40}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query
DEBUG:dffml.MemoryOrchestratorContext:[insert]: dispatch operation: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 12, 'firstName': 'Bill', 'lastName': 'Miles', 'age': 40}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
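
The SqliteDatabaseContext lines above show the statement the db_query insert operation issues: the column list and placeholders appear to be built from the keys of the `data` input, with the values bound as parameters. A minimal standalone sqlite3 sketch of that statement construction follows; the table name and rows are copied from the log, while the in-memory database and CREATE TABLE are assumptions for illustration only, not DFFML code.

    # Standalone illustration (not DFFML code) of the parameterized INSERT
    # logged by dffml.SqliteDatabaseContext above. Table name and row values
    # are taken from the test output; the schema is assumed.
    import sqlite3

    rows = [
        {"key": 10, "firstName": "John", "lastName": "Doe", "age": 16},
        {"key": 11, "firstName": "John", "lastName": "Miles", "age": 37},
        {"key": 12, "firstName": "Bill", "lastName": "Miles", "age": 40},
    ]

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE myTable (`key` INTEGER, `firstName` TEXT, `lastName` TEXT, `age` INTEGER)"
    )
    for data in rows:
        # Column list and placeholders derived from the keys of the `data` input,
        # matching the logged statement:
        # INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
        cols = ", ".join(f"`{col}`" for col in data)
        placeholders = ", ".join("?" for _ in data)
        conn.execute(
            f"INSERT INTO myTable ( {cols} ) VALUES( {placeholders} )",
            tuple(data.values()),
        )
    conn.commit()
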
test_2_lookup (tests.operation.test_sqlite_query.TestSqliteQuery) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf048bc90>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpf2mdg9zo.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf048bc90>: ({'lookup': [Input(value=myTable, definition=query_table), Input(value=[], definition=query_cols), Input(value=[], definition=query_conditions)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=[], definition=query_cols), Input(value=[], definition=query_conditions)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': [], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT * FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 11.0, 'firstName': 'John', 'lastName': 'Miles', 'age': 37.0}, {'key': 12.0, 'firstName': 'Bill', 'lastName': 'Miles', 'age': 40.0}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 11.0, 'firstName': 'John', 'lastName': 'Miles', 'age': 37.0}, {'key': 12.0, 'firstName': 'Bill', 'lastName': 'Miles', 'age': 40.0}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
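
For the lookup, the empty `cols` and `conditions` inputs translate into a plain SELECT * FROM myTable, and the get_single output stage surfaces the previously inserted rows under the query_lookups key. The same query can be reproduced with the standard-library sqlite3 module; this is an illustration only, not DFFML code, and it does not reproduce the float representation (10.0, 16.0, ...) the log shows for the numeric columns.

    # Standalone illustration (not DFFML code) of the lookup above.
    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE myTable (`key` INTEGER, `firstName` TEXT, `lastName` TEXT, `age` INTEGER)"
    )
    conn.executemany(
        "INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )",
        [(10, "John", "Doe", 16), (11, "John", "Miles", 37), (12, "Bill", "Miles", 40)],
    )
    conn.row_factory = sqlite3.Row
    # Empty cols/conditions -> "SELECT * FROM myTable", rows returned as dicts.
    lookups = [dict(zip(row.keys(), row)) for row in conn.execute("SELECT * FROM myTable")]
    print({"query_lookups": lookups})
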
test_AcceptUserInput (tests.operation.test_io.TestInputOutput) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf048b350>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation AcceptUserInput(AcceptUserInput) with base config
DEBUG:dffml.AcceptUserInput:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf048b350>: ({'testInput': []},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: []
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: testInput
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: AcceptUserInput
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'InputData': 'Testing AcceptUserInput'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['UserInput']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [UserInput]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'UserInput': 'Testing AcceptUserInput'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
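
AcceptUserInput runs with no inputs and produces {'InputData': 'Testing AcceptUserInput'}, which get_single then exposes as UserInput. Presumably the test substitutes that string for interactive input; a minimal standalone analogue using unittest.mock is sketched below. The function name and prompt text are hypothetical, not taken from the project.

    # Minimal standalone analogue (not DFFML code) of the AcceptUserInput test above.
    from unittest import mock

    def accept_user_input() -> str:
        # Stand-in for the operation's PROCESSING stage; the prompt is hypothetical.
        return input("Enter the text: ")

    # The test presumably feeds the string instead of a real user typing it.
    with mock.patch("builtins.input", return_value="Testing AcceptUserInput"):
        user_input = accept_user_input()

    # get_single then exposes the value under the requested definition name.
    print({"UserInput": user_input})
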
test_print_output (tests.operation.test_io.TestInputOutput) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf046f150>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation print_output(print_output) with base config
DEBUG:dffml.PrintOutputImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf046f150>: ([Input(value=Testing print_output, definition=DataToPrint)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=Testing print_output, definition=DataToPrint)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 8151bcd1f0d1b50ad343b92b59dd60d47de85149989949c87b0e6af569065a93
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] print_output
DEBUG:dffml.MemoryOrchestratorContext:[8151bcd1f0d1b50ad343b92b59dd60d47de85149989949c87b0e6af569065a93]: dispatch operation: print_output
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: print_output
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'data': 'Testing print_output'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
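
print_output is the mirror image: it consumes the `data` input, prints it, and produces no outputs (hence "Outputs: None" above). A test for such an operation would typically capture stdout and compare; a minimal standalone analogue, not DFFML code, follows.

    # Minimal standalone analogue (not DFFML code) of the print_output test above.
    import contextlib
    import io

    def print_output(data: str) -> None:
        # Stand-in for the print_output operation's PROCESSING stage.
        print(data)

    captured = io.StringIO()
    with contextlib.redirect_stdout(captured):
        print_output("Testing print_output")

    assert captured.getvalue().strip() == "Testing print_output"
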
test_run (tests.operation.test_dataflow.TestRunDataFlowOnRecord) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf046f490>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation run_dataflow(dffml.dataflow.run) with provided config RunDataFlowConfig(dataflow=<dffml.df.types.DataFlow object at 0x7f6cf199f890>)
DEBUG:dffml.DffmlDataflowRunImplementation:RunDataFlowConfig(dataflow=<dffml.df.types.DataFlow object at 0x7f6cf199f890>)
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf046f490>: ({'add_op': [Input(value={'add_op': [{'value': 'add 40 and 2', 'definition': 'calc_string'}, {'value': ['result'], 'definition': 'get_single_spec'}]}, definition=flow_inputs)], 'mult_op': [Input(value={'mult_op': [{'value': 'multiply 42 and 10', 'definition': 'calc_string'}, {'value': ['result'], 'definition': 'get_single_spec'}]}, definition=flow_inputs)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value={'add_op': [{'value': 'add 40 and 2', 'definition': 'calc_string'}, {'value': ['result'], 'definition': 'get_single_spec'}]}, definition=flow_inputs)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value={'mult_op': [{'value': 'multiply 42 and 10', 'definition': 'calc_string'}, {'value': ['result'], 'definition': 'get_single_spec'}]}, definition=flow_inputs)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add_op
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: mult_op
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] run_dataflow
DEBUG:dffml.MemoryOrchestratorContext:[add_op]: dispatch operation: run_dataflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] run_dataflow
DEBUG:dffml.MemoryOrchestratorContext:[mult_op]: dispatch operation: run_dataflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: run_dataflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'inputs': {'add_op': [{'value': 'add 40 and 2', 'definition': 'calc_string'}, {'value': ['result'], 'definition': 'get_single_spec'}]}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf199f890>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: ({'add_op': [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add_op
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: run_dataflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'inputs': {'mult_op': [{'value': 'multiply 42 and 10', 'definition': 'calc_string'}, {'value': ['result'], 'definition': 'get_single_spec'}]}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf199f890>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: ({'mult_op': [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: mult_op
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add_op]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[mult_op]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add_op]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[mult_op]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'results': [{'add_op': {'result': 42}}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'results': [{'mult_op': {'result': 420}}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['flow_results']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [flow_results]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'flow_results': {'add_op': {'result': 42}}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['flow_results']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [flow_results]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'flow_results': {'mult_op': {'result': 420}}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
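
In this test the outer run_dataflow operation feeds each context's inputs into the nested calc dataflow, which parses the string, dispatches to add or mult depending on the parsed conditions, and hands the result back up as flow_results. The arithmetic visible in the log can be reproduced with a plain-Python sketch; the function bodies below are reconstructed from the logged inputs and outputs and are assumptions for illustration, not the project's actual operations.

    # Plain-Python sketch of what parse_line, add and mult appear to compute,
    # reconstructed from the logged inputs/outputs (assumptions, not DFFML code).
    import re
    from typing import Dict, List, Union

    def parse_line(line: str) -> Dict[str, Union[bool, List[int]]]:
        # 'add 40 and 2'       -> {'add': True,  'mult': False, 'numbers': [40, 2]}
        # 'multiply 42 and 10' -> {'add': False, 'mult': True,  'numbers': [42, 10]}
        return {
            "add": line.startswith("add"),
            "mult": line.startswith("multiply"),
            "numbers": [int(n) for n in re.findall(r"\d+", line)],
        }

    def add(numbers: List[int]) -> Dict[str, int]:
        return {"sum": sum(numbers)}

    def mult(numbers: List[int]) -> Dict[str, int]:
        product = 1
        for n in numbers:
            product *= n
        return {"product": product}

    for line in ("add 40 and 2", "multiply 42 and 10"):
        parsed = parse_line(line)
        result = add(parsed["numbers"]) if parsed["add"] else mult(parsed["numbers"])
        print(line, "->", result)  # {'sum': 42} and {'product': 420}, as in the log
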
test_run_custom (tests.operation.test_dataflow.TestRunDataFlowOnRecord) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf046fbd0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation run_dataflow(dffml.dataflow.run) with provided config RunDataFlowConfig(dataflow=<dffml.df.types.DataFlow object at 0x7f6cf048b6d0>)
DEBUG:dffml.DffmlDataflowRunImplementation:RunDataFlowConfig(dataflow=<dffml.df.types.DataFlow object at 0x7f6cf048b6d0>)
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf046fbd0>: ({'add 40 and 2': [Input(value=add 40 and 2, definition=calc_string)], 'multiply 42 and 10': [Input(value=multiply 42 and 10, definition=calc_string)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] run_dataflow
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: run_dataflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] run_dataflow
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: run_dataflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: run_dataflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf048b6d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf048b6d0>: ({'add 40 and 2': [Input(value=add 40 and 2, definition=calc_string)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: run_dataflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf048b6d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf048b6d0>: ({'multiply 42 and 10': [Input(value=multiply 42 and 10, definition=calc_string)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
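
test_run_custom drives the same nested calc dataflow, but here each outer context is keyed by the calc string itself ('add 40 and 2', 'multiply 42 and 10') and only a calc_string input is seeded per context; the outer get_single stage then collects {'result': 42} and {'result': 420} directly rather than wrapping them under flow_results as in the previous test.
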
test_create (tests.operation.test_mapping.TestMapping) ... ok
test_extract_value (tests.operation.test_mapping.TestMapping) ... ok
test_associatedefinition (tests.operation.test_associatedef.TestAssociateDefinition) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf046f490>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation associate_definition(associate_definition) with base config
DEBUG:dffml.AssociateDefinitionImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf046f490>: ([Input(value=my favorite value, definition=feed), Input(value=face, definition=output), Input(value=my second favorite value, definition=dead), Input(value=beef, definition=output), Input(value={'feed': 'output'}, definition=associate_spec)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=my favorite value, definition=feed), Input(value=face, definition=output), Input(value=my second favorite value, definition=dead), Input(value=beef, definition=output), Input(value={'feed': 'output'}, definition=associate_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 3470767562bb6b2d01720e78c310b5597c2e509afb9b375bc38fcbfb87329150
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: associate_definition
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': {'feed': 'output'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'feed': 'face'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf046fbd0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation associate_definition(associate_definition) with base config
DEBUG:dffml.AssociateDefinitionImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf046fbd0>: ([Input(value=my favorite value, definition=feed), Input(value=face, definition=output), Input(value=my second favorite value, definition=dead), Input(value=beef, definition=output), Input(value={'dead': 'output'}, definition=associate_spec)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=my favorite value, definition=feed), Input(value=face, definition=output), Input(value=my second favorite value, definition=dead), Input(value=beef, definition=output), Input(value={'dead': 'output'}, definition=associate_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: b0ebd149bbf2f57644c839217173656c7274fb325d1d26de06927b50ea1b9591
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: associate_definition
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': {'dead': 'output'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'dead': 'beef'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_literal_eval (tests.operation.test_preprocess.TestPreprocess) ... ok
test_run (tests.test_high_level.TestDataFlow) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf199f890>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: ({'add 40 and 2': [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)], 'multiply 42 and 10': [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf199f890>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: (<dffml.df.memory.MemoryInputSet object at 0x7f6cf17555d0>, <dffml.df.memory.MemoryInputSet object at 0x7f6cf048b1d0>)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf17555d0>
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf048b1d0>
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf199f890>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: ([Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)], [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)])
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 6b7381de51997626cc28211a44bc4be50999ef0b8f22b58492d38ac9607b4715
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: b02bfa9728b256878eb63cc3b13588ad4d01f14e96ff00eeb032d19c41c04829
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[6b7381de51997626cc28211a44bc4be50999ef0b8f22b58492d38ac9607b4715]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[b02bfa9728b256878eb63cc3b13588ad4d01f14e96ff00eeb032d19c41c04829]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[6b7381de51997626cc28211a44bc4be50999ef0b8f22b58492d38ac9607b4715]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[b02bfa9728b256878eb63cc3b13588ad4d01f14e96ff00eeb032d19c41c04829]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_predict (tests.test_high_level.TestML) ... DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpcxqml7e9/0.7313858493492768.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpcxqml7e9/0.7313858493492768.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpcxqml7e9/0.7313858493492768.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Salary', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmpcxqml7e9/0.7313858493492768.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpcxqml7e9/0.7313858493492768.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 4 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpqmnpr0j_/0.05367070548951791.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpqmnpr0j_/0.05367070548951791.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpqmnpr0j_/0.05367070548951791.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Salary', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmpqmnpr0j_/0.05367070548951791.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpqmnpr0j_/0.05367070548951791.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpmewn6in1/0.024526865442471957.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpmewn6in1/0.024526865442471957.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpmewn6in1/0.024526865442471957.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmpmewn6in1/0.024526865442471957.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpmewn6in1/0.024526865442471957.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
skipped 'Required plugins: dffml-model-scikit must be installed in development mode'
test_save_and_load (tests.test_high_level.TestML) ... DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpjnjo_ntb/0.5254588464118608.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpjnjo_ntb/0.5254588464118608.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpjnjo_ntb/0.5254588464118608.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Salary', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmpjnjo_ntb/0.5254588464118608.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpjnjo_ntb/0.5254588464118608.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 4 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmp7se3wg1v/0.7422900951891598.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmp7se3wg1v/0.7422900951891598.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmp7se3wg1v/0.7422900951891598.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Salary', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmp7se3wg1v/0.7422900951891598.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmp7se3wg1v/0.7422900951891598.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmp_1ujlumo/0.37404401383219366.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmp_1ujlumo/0.37404401383219366.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmp_1ujlumo/0.37404401383219366.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmp_1ujlumo/0.37404401383219366.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmp_1ujlumo/0.37404401383219366.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpw38hu930/0.5506216610819294.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpw38hu930/0.5506216610819294.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpw38hu930/0.5506216610819294.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'A', 'B', 'prediction_C', 'confidence_C']
DEBUG:dffml.CSVSource:/tmp/tmpw38hu930/0.5506216610819294.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpw38hu930/0.5506216610819294.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:/tmp/tmpw38hu930/0.5506216610819294.csv first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpw38hu930/0.5506216610819294.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'A', 'B', 'prediction_C', 'confidence_C']
DEBUG:dffml.CSVSource:/tmp/tmpw38hu930/0.5506216610819294.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpw38hu930/0.5506216610819294.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:/tmp/tmpw38hu930/0.5506216610819294.csv first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpw38hu930/0.5506216610819294.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'A', 'B', 'prediction_C', 'confidence_C']
DEBUG:dffml.CSVSource:/tmp/tmpw38hu930/0.5506216610819294.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpw38hu930/0.5506216610819294.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
ok
test_run (tests.test_df.TestOrchestrator) ... DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf199f890>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: ({'add 40 and 2': [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)], 'multiply 42 and 10': [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: (<dffml.df.memory.MemoryInputSet object at 0x7f6cf175b9d0>, <dffml.df.memory.MemoryInputSet object at 0x7f6cf0479110>)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf175b9d0>
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf0479110>
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf199f890>: ([Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)], [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)])
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=add 40 and 2, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=multiply 42 and 10, definition=calc_string), Input(value=['result'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 410258145f6d4c0ef918d98a4620850284aef4551869b3bcbf71bcef196d5880
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 2920c1d214d20829c188d5951b23eb4fb590292eb476cfb096218121d94edaa0
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[410258145f6d4c0ef918d98a4620850284aef4551869b3bcbf71bcef196d5880]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[2920c1d214d20829c188d5951b23eb4fb590292eb476cfb096218121d94edaa0]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[410258145f6d4c0ef918d98a4620850284aef4551869b3bcbf71bcef196d5880]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[2920c1d214d20829c188d5951b23eb4fb590292eb476cfb096218121d94edaa0]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
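
The test_run output above exercises the calc example dataflow: parse_line decides whether a line is an addition or a multiplication, the is_add/is_mult conditions route the numbers to add or mult, and the get_single OUTPUT stage collects the value mapped to the 'result' definition. A minimal sketch of that flow, reconstructed from the operation names, definitions, and inputs visible in the log; the import paths and @op keyword signature below are assumptions for this era of dffml and may differ between releases:

import asyncio

# Assumed import locations (dffml ~0.3.x development tree).
from dffml.df.types import Definition, DataFlow, Input
from dffml.df.base import op
from dffml.df.memory import MemoryOrchestrator
from dffml.operation.output import GetSingle

calc_string = Definition(name="calc_string", primitive="str")
is_add = Definition(name="is_add", primitive="bool")
is_mult = Definition(name="is_mult", primitive="bool")
numbers = Definition(name="numbers", primitive="List[int]")
result = Definition(name="result", primitive="int")

@op(inputs={"line": calc_string},
    outputs={"add": is_add, "mult": is_mult, "numbers": numbers})
async def parse_line(line: str):
    # Mirrors the parse_line Outputs lines in the log: which operation to run, plus the operands.
    return {
        "add": line.startswith("add"),
        "mult": line.startswith("multiply"),
        "numbers": [int(word) for word in line.split() if word.isdigit()],
    }

@op(inputs={"numbers": numbers}, outputs={"sum": result}, conditions=[is_add])
async def add(numbers):
    return {"sum": sum(numbers)}

@op(inputs={"numbers": numbers}, outputs={"product": result}, conditions=[is_mult])
async def mult(numbers):
    product = 1
    for number in numbers:
        product *= number
    return {"product": product}

async def main():
    dataflow = DataFlow.auto(parse_line, add, mult, GetSingle)
    # One context per line, each seeded with the line and the get_single spec, as in the log.
    inputs = {
        line: [
            Input(value=line, definition=calc_string),
            Input(value=[result.name], definition=GetSingle.op.inputs["spec"]),
        ]
        for line in ["add 40 and 2", "multiply 42 and 10"]
    }
    async with MemoryOrchestrator.withconfig({}) as orchestrator:
        async with orchestrator(dataflow) as octx:
            async for ctx, results in octx.run(inputs):
                print(results)  # expect {'result': 42} and {'result': 420}

asyncio.run(main())
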
test_docstring (tests.test_docstrings.base_BaseDataFlowFacilitatorObject) ... Enter the value: Finding tests in BaseDataFlowFacilitatorObject
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.Object:BaseConfig()
ok
test_docstring (tests.test_docstrings.configloader_configloader_BaseConfigLoader) ... Finding tests in BaseConfigLoader
ok
test_docstring (tests.test_docstrings.configloader_json_JSONConfigLoader) ... Finding tests in JSONConfigLoader
ok
test_docstring (tests.test_docstrings.df_base_OperationImplementationContext_OperationImplementationContext_subflow) ... Finding tests in OperationImplementationContext.subflow
ok
test_docstring (tests.test_docstrings.df_base_op) ... Finding tests in op
ok
test_docstring (tests.test_docstrings.df_memory_MemoryInputNetworkContext_MemoryInputNetworkContext_cadd) ... Finding tests in MemoryInputNetworkContext.cadd
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1450250>
ok
test_docstring (tests.test_docstrings.df_memory_MemoryInputNetworkContext_MemoryInputNetworkContext_sadd) ... Finding tests in MemoryInputNetworkContext.sadd
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf144e1d0>
ok
test_docstring (tests.test_docstrings.df_types_Definition) ... Finding tests in Definition
ok
test_docstring (tests.test_docstrings.feature_feature_Feature) ... Finding tests in Feature
ok
test_docstring (tests.test_docstrings.feature_feature_Features_Feature) ... Finding tests in Feature
ok
test_docstring (tests.test_docstrings.high_level_accuracy) ... DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}]))
DEBUG:dffml.SLRModel:No saved model in /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf13dfc90>
DEBUG:dffml.SLRModel:Number of input records: 4
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
Finding tests in accuracy
DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 4, 'Salary': 50}, 'extra': {}}, {'key': 1, 'features': {'Years': 5, 'Salary': 60}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 4, 'Salary': 50}, 'extra': {}}, {'key': 1, 'features': {'Years': 5, 'Salary': 60}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf13f4d90>
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
ok
test_docstring (tests.test_docstrings.high_level_load) ... Finding tests in load
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='load.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.CSVSource:PosixPath('load.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:load.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'A', 'B']
DEBUG:dffml.CSVSource:load.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('load.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:load.csv first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('load.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'A', 'B']
DEBUG:dffml.CSVSource:load.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('load.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:load.csv first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('load.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'A', 'B']
DEBUG:dffml.CSVSource:load.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('load.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
ok
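
The high_level_load docstring test above writes two records with features A and B to load.csv and reads them back. A rough equivalent is sketched below; the high-level save/load helpers and the Record constructor are written as assumed for this release, and the feature values are placeholders (the log only shows the fieldnames and record count):

import asyncio

# Assumed import locations (dffml ~0.3.x development tree).
from dffml.high_level import save, load
from dffml.record import Record
from dffml.source.csv import CSVSource

source = CSVSource(filename="load.csv", allowempty=True, readwrite=True)

async def main():
    # Two records with features A and B, matching the fieldnames logged above.
    await save(
        source,
        Record("1", data={"features": {"A": 0, "B": 1}}),
        Record("2", data={"features": {"A": 3, "B": 4}}),
    )
    # Reading them back triggers the "loaded 2 records" lines seen in the log.
    async for record in load(source):
        print(record.export())

asyncio.run(main())
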
test_docstring (tests.test_docstrings.high_level_predict) ... DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf13f4d90>
DEBUG:dffml.SLRModel:Number of input records: 4
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
Finding tests in predict
DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 6}, 'extra': {}}, {'key': 1, 'features': {'Years': 7}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 6}, 'extra': {}}, {'key': 1, 'features': {'Years': 7}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf13f7790>
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
ok
test_docstring (tests.test_docstrings.high_level_run) ... Finding tests in run
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13f4d90>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation send_to_server(send_to_server) with base config
DEBUG:dffml.SendToServerImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13f4d90>: ([Input(value=echo, definition=send_to_server.inputs.message), Input(value=echo, definition=send_to_server.inputs.message), Input(value=127.0.0.1, definition=send_to_server.inputs.host), Input(value=53643, definition=send_to_server.inputs.port)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=echo, definition=send_to_server.inputs.message), Input(value=echo, definition=send_to_server.inputs.message), Input(value=127.0.0.1, definition=send_to_server.inputs.host), Input(value=53643, definition=send_to_server.inputs.port)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 5e4d8abddae8b72a80475fda86597d3b88210e02520d3260df51132088ba47e2
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] send_to_server
DEBUG:dffml.MemoryOrchestratorContext:[5e4d8abddae8b72a80475fda86597d3b88210e02520d3260df51132088ba47e2]: dispatch operation: send_to_server
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] send_to_server
DEBUG:dffml.MemoryOrchestratorContext:[5e4d8abddae8b72a80475fda86597d3b88210e02520d3260df51132088ba47e2]: dispatch operation: send_to_server
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: send_to_server
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'host': '127.0.0.1', 'port': 53643, 'message': 'echo'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: send_to_server
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'host': '127.0.0.1', 'port': 53643, 'message': 'echo'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
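
The high_level_run docstring test above dispatches a send_to_server operation twice within one context; the definition names in the log (send_to_server.inputs.message, .host, .port) are the ones dffml generates automatically when @op is applied to a function with type annotations and no explicit definitions. A sketch under those assumptions follows; whether @op needs parentheses here, and whether run is re-exported from the top-level package, varies by release:

import asyncio

# Assumed import locations (dffml ~0.3.x development tree).
from dffml.df.types import DataFlow, Input
from dffml.df.base import op
from dffml.high_level import run

@op
async def send_to_server(host: str, port: int, message: str):
    # Open a connection to the local echo server and send one message.
    reader, writer = await asyncio.open_connection(host, port)
    writer.write(message.encode())
    await writer.drain()
    writer.close()

async def main():
    # Throwaway echo server so the operation has something to talk to.
    async def handle(reader, writer):
        writer.write(await reader.read(100))
        await writer.drain()
        writer.close()

    server = await asyncio.start_server(handle, "127.0.0.1", 53643)
    dataflow = DataFlow.auto(send_to_server)
    inputs = [
        # Two copies of the message input, as in the log, plus host and port.
        Input(value="echo", definition=send_to_server.op.inputs["message"]),
        Input(value="echo", definition=send_to_server.op.inputs["message"]),
        Input(value="127.0.0.1", definition=send_to_server.op.inputs["host"]),
        Input(value=53643, definition=send_to_server.op.inputs["port"]),
    ]
    async for ctx, results in run(dataflow, inputs):
        print(ctx, results)
    server.close()

asyncio.run(main())
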
test_docstring (tests.test_docstrings.high_level_save) ... Finding tests in save
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='save.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.CSVSource:PosixPath('save.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:save.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Trust', 'Years', 'prediction_Salary', 'confidence_Salary']
DEBUG:dffml.CSVSource:save.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('save.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 1 records
ok
test_docstring (tests.test_docstrings.high_level_train) ... Finding tests in train
DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf13df910>
DEBUG:dffml.SLRModel:Number of input records: 4
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
ok
test_docstring (tests.test_docstrings.noasync_accuracy) ... DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf16e3090>
DEBUG:dffml.SLRModel:Number of input records: 4
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
Finding tests in accuracy
DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 4, 'Salary': 50}, 'extra': {}}, {'key': 1, 'features': {'Years': 5, 'Salary': 60}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 4, 'Salary': 50}, 'extra': {}}, {'key': 1, 'features': {'Years': 5, 'Salary': 60}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf14202d0>
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
ok
test_docstring (tests.test_docstrings.noasync_predict) ... DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf16e3410>
DEBUG:dffml.SLRModel:Number of input records: 4
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
Finding tests in predict
DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 6}, 'extra': {}}, {'key': 1, 'features': {'Years': 7}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 6}, 'extra': {}}, {'key': 1, 'features': {'Years': 7}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf141bad0>
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
ok
test_docstring (tests.test_docstrings.noasync_train) ... Finding tests in train
DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf13f77d0>
DEBUG:dffml.SLRModel:Number of input records: 4
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
ok
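
The three noasync docstring tests above train SLRModel on four (Years, Salary) records, score it against two held-out records, and predict Salary for Years 6 and 7, saving and reloading the model JSON under ~/.cache/dffml/slr each time. A condensed sketch of that sequence; the import paths, DefFeature spelling, and the tuple shape yielded by predict are assumptions based on the test names in the log:

# Assumed import locations; later releases spell DefFeature as Feature(...).
from dffml.feature import Features, DefFeature
from dffml.model.slr import SLRModel
from dffml.noasync import train, accuracy, predict

model = SLRModel(
    features=Features(DefFeature("Years", int, 1)),
    predict=DefFeature("Salary", int, 1),
    directory="~/.cache/dffml/slr",  # same directory the log shows being saved to and loaded from
)

# Training data matching the MemorySource records in the log.
train(
    model,
    {"Years": 0, "Salary": 10},
    {"Years": 1, "Salary": 20},
    {"Years": 2, "Salary": 30},
    {"Years": 3, "Salary": 40},
)

# Held-out records used by the accuracy docstring test.
print("Accuracy:", accuracy(model, {"Years": 4, "Salary": 50}, {"Years": 5, "Salary": 60}))

# Predict Salary for the two unlabeled records.
for key, features, prediction in predict(model, {"Years": 6}, {"Years": 7}):
    print(features["Years"], "->", prediction["Salary"]["value"])
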
test_docstring (tests.test_docstrings.operation_dataflow_run_dataflow) ... Finding tests in run_dataflow
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1403690>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation dffml.dataflow.run(dffml.dataflow.run) with provided config RunDataFlowConfig(dataflow=<dffml.df.types.DataFlow object at 0x7f6cf1403710>)
DEBUG:dffml.DffmlDataflowRunImplementation:RunDataFlowConfig(dataflow=<dffml.df.types.DataFlow object at 0x7f6cf1403710>)
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1403690>: ({'run_subflow': [Input(value={'dffml': [{'value': 'https://github.com/intel/dffml', 'definition': 'URL'}]}, definition=flow_inputs)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value={'dffml': [{'value': 'https://github.com/intel/dffml', 'definition': 'URL'}]}, definition=flow_inputs)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: run_subflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] dffml.dataflow.run
DEBUG:dffml.MemoryOrchestratorContext:[run_subflow]: dispatch operation: dffml.dataflow.run
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: dffml.dataflow.run
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'inputs': {'dffml': [{'value': 'https://github.com/intel/dffml', 'definition': 'URL'}]}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1403710>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1403710>: ({'dffml': [Input(value=https://github.com/intel/dffml, definition=URL)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=https://github.com/intel/dffml, definition=URL)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: dffml
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['URL']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [URL]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'URL': 'https://github.com/intel/dffml'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'results': [{'dffml': {'URL': 'https://github.com/intel/dffml'}}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['flow_results']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [flow_results]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'flow_results': {'dffml': {'URL': 'https://github.com/intel/dffml'}}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13df3d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation dffml.dataflow.run(dffml.dataflow.run) with provided config RunDataFlowConfig(dataflow=<dffml.df.types.DataFlow object at 0x7f6cf1420450>)
DEBUG:dffml.DffmlDataflowRunImplementation:RunDataFlowConfig(dataflow=<dffml.df.types.DataFlow object at 0x7f6cf1420450>)
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13df3d0>: ({'run_subflow': [Input(value=https://github.com/intel/dffml, definition=URL)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=https://github.com/intel/dffml, definition=URL)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: run_subflow
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] dffml.dataflow.run
DEBUG:dffml.MemoryOrchestratorContext:[run_subflow]: dispatch operation: dffml.dataflow.run
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: dffml.dataflow.run
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'URL': 'https://github.com/intel/dffml'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1420450>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation last_path(last_path) with base config
DEBUG:dffml.LastPathImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1420450>: ({'https://github.com/intel/dffml': [Input(value=https://github.com/intel/dffml, definition=URL)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=https://github.com/intel/dffml, definition=URL)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: https://github.com/intel/dffml
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] last_path
DEBUG:dffml.MemoryOrchestratorContext:[https://github.com/intel/dffml]: dispatch operation: last_path
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: last_path
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'url': 'https://github.com/intel/dffml'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'last': 'dffml'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['last_element_in_path']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [last_element_in_path]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'last_element_in_path': 'dffml'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'last_element_in_path': 'dffml'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['last_element_in_path']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [last_element_in_path]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'last_element_in_path': 'dffml'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_db_db_query_create_table) ... DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1453f90>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_create(db_query_create_table) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1453f90>: ([Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: c5ba52cff53d51f3691f4b51eb33f99225366f0ed7c8e26d6fd1055c7480c0d2
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_create
DEBUG:dffml.MemoryOrchestratorContext:[c5ba52cff53d51f3691f4b51eb33f99225366f0ed7c8e26d6fd1055c7480c0d2]: dispatch operation: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': {'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable (`key` INTEGER NOT NULL PRIMARY KEY, `firstName` text, `lastName` text, `age` int)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_create_table
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf14109d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_create(db_query_create_table) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf14109d0>: ([Input(value=myTable1, definition=query_table), Input(value={'key': 'real', 'firstName': 'text', 'lastName': 'text', 'age': 'real'}, definition=query_cols)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable1, definition=query_table), Input(value={'key': 'real', 'firstName': 'text', 'lastName': 'text', 'age': 'real'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: dd3866ef4e6ff095726ee5d7226cb22d9e37e7482fa2ec496ad6327205fde194
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_create
DEBUG:dffml.MemoryOrchestratorContext:[dd3866ef4e6ff095726ee5d7226cb22d9e37e7482fa2ec496ad6327205fde194]: dispatch operation: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable1', 'cols': {'key': 'real', 'firstName': 'text', 'lastName': 'text', 'age': 'real'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable1 (`key` real, `firstName` text, `lastName` text, `age` real)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
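
Note (not part of the original log output): the db_query_create_table docstring test above only issues the two CREATE TABLE statements shown in the SqliteDatabaseContext lines. As an illustrative sketch, the same SQL can be reproduced with Python's standard sqlite3 module; this is not the DFFML operation API, just the statements the log recorded.

    import sqlite3

    # Re-issue the CREATE TABLE statements exactly as logged above.
    conn = sqlite3.connect("examples.db")
    conn.execute(
        "CREATE TABLE IF NOT EXISTS myTable "
        "(`key` INTEGER NOT NULL PRIMARY KEY, `firstName` text, `lastName` text, `age` int)"
    )
    conn.execute(
        "CREATE TABLE IF NOT EXISTS myTable1 "
        "(`key` real, `firstName` text, `lastName` text, `age` real)"
    )
    conn.commit()
    conn.close()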
test_docstring (tests.test_docstrings.operation_db_db_query_insert) ... DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13df3d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_create(db_query_create_table) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13df3d0>: ([Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 714db891ab0594a1b6b56d8679c3c14558f8c912b97220ba4eefbe6de73554b1
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_create
DEBUG:dffml.MemoryOrchestratorContext:[714db891ab0594a1b6b56d8679c3c14558f8c912b97220ba4eefbe6de73554b1]: dispatch operation: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': {'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable (`key` INTEGER NOT NULL PRIMARY KEY, `firstName` text, `lastName` text, `age` int)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_insert
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1410050>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_insert(db_query_insert) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryInsertImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1410050>: ({'insert': [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}, definition=query_data)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_insert
DEBUG:dffml.MemoryOrchestratorContext:[insert]: dispatch operation: db_query_insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
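
Note (annotation, not log output): the db_query_insert docstring test runs two contexts, "insert" and "lookup"; the logged SQL is a parameterized INSERT followed by a SELECT whose result feeds get_single. A minimal stdlib sketch of that same SQL, assuming the examples.db file from the earlier create-table step:

    import sqlite3

    conn = sqlite3.connect("examples.db")
    # Parameterized INSERT matching the logged statement for the "insert" context.
    conn.execute(
        "INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )",
        (10, "John", "Doe", 16),
    )
    conn.commit()
    # SELECT matching the "lookup" context; the log reports
    # {'lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}]}.
    rows = conn.execute("SELECT `firstName`, `lastName`, `age` FROM myTable").fetchall()
    print(rows)
    conn.close()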
test_docstring (tests.test_docstrings.operation_db_db_query_insert_or_update) ... DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13df3d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_create(db_query_create_table) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13df3d0>: ([Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 24e89a4bb9ffe0213b67eaacf1043eee1530d1eaab3d11f5cf22a1e9f6d90154
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_create
DEBUG:dffml.MemoryOrchestratorContext:[24e89a4bb9ffe0213b67eaacf1043eee1530d1eaab3d11f5cf22a1e9f6d90154]: dispatch operation: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': {'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable (`key` INTEGER NOT NULL PRIMARY KEY, `firstName` text, `lastName` text, `age` int)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_insert_or_update
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf14108d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_insert_or_update(db_query_insert_or_update) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryInsertOrUpdateImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf14108d0>: ({'insert_or_update': [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 38}, definition=query_data)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 38}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert_or_update
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_insert_or_update
DEBUG:dffml.MemoryOrchestratorContext:[insert_or_update]: dispatch operation: db_query_insert_or_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_insert_or_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 38}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Wick', 'age': 38}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Wick', 'age': 38}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf14108d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_insert_or_update(db_query_insert_or_update) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryInsertOrUpdateImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf14108d0>: ({'insert_or_update': [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 39}, definition=query_data)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 39}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert_or_update
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_insert_or_update
DEBUG:dffml.MemoryOrchestratorContext:[insert_or_update]: dispatch operation: db_query_insert_or_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_insert_or_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 39}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.SqliteDatabaseContext:UPDATE myTable SET `firstName` = ? ,`lastName` = ? ,`age` = ? WHERE ((`key` = ? ))
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Wick', 'age': 39}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Wick', 'age': 39}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
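
Note (annotation, not log output): the db_query_insert_or_update test runs twice. The first run (age 38) logs only an INSERT; the second run (age 39) logs an INSERT followed by an UPDATE keyed on `key`, which suggests the operation falls back to UPDATE when the INSERT collides with an existing primary key. A hedged sketch of that insert-or-update pattern using only sqlite3 (the DFFML operation's internals are not shown in the log):

    import sqlite3

    conn = sqlite3.connect("examples.db")
    data = {"key": 11, "firstName": "John", "lastName": "Wick", "age": 39}
    try:
        # First attempt the INSERT the log shows.
        conn.execute(
            "INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )",
            (data["key"], data["firstName"], data["lastName"], data["age"]),
        )
    except sqlite3.IntegrityError:
        # Primary key already present: fall back to the UPDATE the log shows.
        conn.execute(
            "UPDATE myTable SET `firstName` = ? ,`lastName` = ? ,`age` = ? WHERE ((`key` = ? ))",
            (data["firstName"], data["lastName"], data["age"], data["key"]),
        )
    conn.commit()
    conn.close()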
test_docstring (tests.test_docstrings.operation_db_db_query_lookup) ... DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf16e3510>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_create(db_query_create_table) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf16e3510>: ([Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 4299185db2b0b4160740f0b6320799333f42255985ca738b4af4f021b8a2a5de
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_create
DEBUG:dffml.MemoryOrchestratorContext:[4299185db2b0b4160740f0b6320799333f42255985ca738b4af4f021b8a2a5de]: dispatch operation: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': {'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable (`key` INTEGER NOT NULL PRIMARY KEY, `firstName` text, `lastName` text, `age` int)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_insert
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a3b50>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_insert(db_query_insert) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryInsertImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a3b50>: ({'insert': [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}, definition=query_data)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_insert
DEBUG:dffml.MemoryOrchestratorContext:[insert]: dispatch operation: db_query_insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_insert_or_update
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a38d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_insert_or_update(db_query_insert_or_update) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryInsertOrUpdateImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a38d0>: ({'insert_or_update': [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 38}, definition=query_data)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 38}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert_or_update
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_insert_or_update
DEBUG:dffml.MemoryOrchestratorContext:[insert_or_update]: dispatch operation: db_query_insert_or_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_insert_or_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 38}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}, {'firstName': 'John', 'lastName': 'Wick', 'age': 38}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}, {'firstName': 'John', 'lastName': 'Wick', 'age': 38}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a38d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_insert_or_update(db_query_insert_or_update) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryInsertOrUpdateImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a38d0>: ({'insert_or_update': [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 39}, definition=query_data)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 39}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert_or_update
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_insert_or_update
DEBUG:dffml.MemoryOrchestratorContext:[insert_or_update]: dispatch operation: db_query_insert_or_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_insert_or_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 11, 'firstName': 'John', 'lastName': 'Wick', 'age': 39}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.SqliteDatabaseContext:UPDATE myTable SET `firstName` = ? ,`lastName` = ? ,`age` = ? WHERE ((`key` = ? ))
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}, {'firstName': 'John', 'lastName': 'Wick', 'age': 39}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}, {'firstName': 'John', 'lastName': 'Wick', 'age': 39}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_lookup
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1410350>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1410350>: ({'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}, {'firstName': 'John', 'lastName': 'Wick', 'age': 39}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}, {'firstName': 'John', 'lastName': 'Wick', 'age': 39}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_db_db_query_remove) ... DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf16e3510>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_create(db_query_create_table) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf16e3510>: ([Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: e847e589e937d5c44692432c23c758c02cf17b390d2455d842e283797fa5578a
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_create
DEBUG:dffml.MemoryOrchestratorContext:[e847e589e937d5c44692432c23c758c02cf17b390d2455d842e283797fa5578a]: dispatch operation: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': {'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable (`key` INTEGER NOT NULL PRIMARY KEY, `firstName` text, `lastName` text, `age` int)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_remove
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1410510>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_remove(db_query_remove) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryRemoveImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1410510>: ({'remove': [Input(value=myTable, definition=query_table), Input(value=[], definition=query_conditions)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=[], definition=query_conditions)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: remove
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_remove
DEBUG:dffml.MemoryOrchestratorContext:[remove]: dispatch operation: db_query_remove
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_remove
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_remove
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_remove
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:DELETE FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_remove
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:DELETE FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_db_db_query_update) ... DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf16e3ad0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_create(db_query_create_table) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf16e3ad0>: ([Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: a30739121dd6600913759eb687b7d855cc3b40a906c28769ced9df41377d8eb3
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_create
DEBUG:dffml.MemoryOrchestratorContext:[a30739121dd6600913759eb687b7d855cc3b40a906c28769ced9df41377d8eb3]: dispatch operation: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': {'key': 'INTEGER NOT NULL PRIMARY KEY', 'firstName': 'text', 'lastName': 'text', 'age': 'int'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS myTable (`key` INTEGER NOT NULL PRIMARY KEY, `firstName` text, `lastName` text, `age` int)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_insert
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf14102d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_insert(db_query_insert) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryInsertImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf14102d0>: ({'insert': [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}, definition=query_data)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}, definition=query_data)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: insert
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_insert
DEBUG:dffml.MemoryOrchestratorContext:[insert]: dispatch operation: db_query_insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_insert
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 16}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO myTable ( `key`, `firstName`, `lastName`, `age` ) VALUES( ?, ?, ?, ? )
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 16}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
Finding tests in db_query_update
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='examples.db')
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1410690>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_update(db_query_update) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryUpdateImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_lookup(db_query_lookup) with provided config DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.DbQueryLookupImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='examples.db')))
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1410690>: ({'update': [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 17}, definition=query_data), Input(value=[], definition=query_conditions)], 'lookup': [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value={'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 17}, definition=query_data), Input(value=[], definition=query_conditions)]
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=myTable, definition=query_table), Input(value=['firstName', 'lastName', 'age'], definition=query_cols), Input(value=[], definition=query_conditions), Input(value=['query_lookups'], definition=get_single_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: update
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_update
DEBUG:dffml.MemoryOrchestratorContext:[update]: dispatch operation: db_query_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_lookup
DEBUG:dffml.MemoryOrchestratorContext:[lookup]: dispatch operation: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_update
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'data': {'key': 10, 'firstName': 'John', 'lastName': 'Doe', 'age': 17}, 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:UPDATE myTable SET `key` = ? ,`firstName` = ? ,`lastName` = ? ,`age` = ?
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_lookup
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'myTable', 'cols': ['firstName', 'lastName', 'age'], 'conditions': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:SELECT `firstName`, `lastName`, `age` FROM myTable
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 17}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['query_lookups']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [query_lookups]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'query_lookups': [{'firstName': 'John', 'lastName': 'Doe', 'age': 17}]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_io_AcceptUserInput) ... Finding tests in AcceptUserInput
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf139edd0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation AcceptUserInput(AcceptUserInput) with base config
DEBUG:dffml.AcceptUserInput:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf139edd0>: ({'input': []},)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: []
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: input
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: AcceptUserInput
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'InputData': 'Data flow is awesome'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['UserInput']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [UserInput]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'UserInput': 'Data flow is awesome'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_io_print_output) ... Finding tests in print_output
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a3e90>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation print_output(print_output) with base config
DEBUG:dffml.PrintOutputImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a3e90>: ([Input(value=print_output example, definition=DataToPrint)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=print_output example, definition=DataToPrint)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: c541d224cbc043dc2ed6a7e0f43294b21c9d1071415315fc279252b84778a835
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] print_output
DEBUG:dffml.MemoryOrchestratorContext:[c541d224cbc043dc2ed6a7e0f43294b21c9d1071415315fc279252b84778a835]: dispatch operation: print_output
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: print_output
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'data': 'print_output example'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_mapping_create_mapping) ... Finding tests in create_mapping
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13aad90>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation dffml.mapping.create(dffml.mapping.create) with base config
DEBUG:dffml.DffmlMappingCreateImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13aad90>: ([Input(value=key1, definition=key), Input(value=42, definition=value)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=key1, definition=key), Input(value=42, definition=value)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 70dd429e7af8896452552deede46959a3d6bcedefea18e9d33164c81c9e4ec2c
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] dffml.mapping.create
DEBUG:dffml.MemoryOrchestratorContext:[70dd429e7af8896452552deede46959a3d6bcedefea18e9d33164c81c9e4ec2c]: dispatch operation: dffml.mapping.create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: dffml.mapping.create
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'key': 'key1', 'value': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'mapping': {'key1': 42}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['mapping']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [mapping]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'mapping': {'key1': 42}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_mapping_mapping_extract_value) ... Finding tests in mapping_extract_value
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13aa690>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation dffml.mapping.extract(dffml.mapping.extract) with base config
DEBUG:dffml.DffmlMappingExtractImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13aa690>: ([Input(value={'key1': {'key2': 42}}, definition=mapping), Input(value=['key1', 'key2'], definition=mapping_traverse)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value={'key1': {'key2': 42}}, definition=mapping), Input(value=['key1', 'key2'], definition=mapping_traverse)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 831bc722cb292800a31ac6ff5669a980ba48dda80c4e6133402c12a859c8ac7a
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] dffml.mapping.extract
DEBUG:dffml.MemoryOrchestratorContext:[831bc722cb292800a31ac6ff5669a980ba48dda80c4e6133402c12a859c8ac7a]: dispatch operation: dffml.mapping.extract
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: dffml.mapping.extract
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'mapping': {'key1': {'key2': 42}}, 'traverse': ['key1', 'key2']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'value': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['value']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [value]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'value': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_math_multiply) ... Finding tests in multiply
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13aa650>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation multiply(multiply) with base config
DEBUG:dffml.MultiplyImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13aa650>: ([Input(value=12, definition=multiplicand_def), Input(value=3, definition=multiplier_def)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=12, definition=multiplicand_def), Input(value=3, definition=multiplier_def)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 5e418c1a77d8a7de969d03a2bec953a66611bb3b600dad19b6c9cfcd4332a57f
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] multiply
DEBUG:dffml.MemoryOrchestratorContext:[5e418c1a77d8a7de969d03a2bec953a66611bb3b600dad19b6c9cfcd4332a57f]: dispatch operation: multiply
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: multiply
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'multiplicand': 12, 'multiplier': 3}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 36}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['product']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [product]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 36}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_model_model_predict) ... Finding tests in model_predict
DEBUG:dffml.SLRModel:SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('~/.cache/dffml/slr'))
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 0, 'features': {'Years': 0, 'Salary': 10}, 'extra': {}}, {'key': 1, 'features': {'Years': 1, 'Salary': 20}, 'extra': {}}, {'key': 2, 'features': {'Years': 2, 'Salary': 30}, 'extra': {}}, {'key': 3, 'features': {'Years': 3, 'Salary': 40}, 'extra': {}}]))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf13a1750>
DEBUG:dffml.SLRModel:Number of input records: 4
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a6110>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation prediction_using_model(dffml.model.predict) with provided config ModelPredictConfig(model=SLRModel(SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('/tmp/tmp.tPXahPUaJ4/.cache/dffml/slr'))))
DEBUG:dffml.DffmlModelPredictImplementation:ModelPredictConfig(model=SLRModel(SLRModelConfig(predict=Salary(Feature)[<class 'int'>, 1], features=[Years(Feature)[<class 'int'>, 1]], directory=PosixPath('/tmp/tmp.tPXahPUaJ4/.cache/dffml/slr'))))
DEBUG:dffml.SLRModel:Loaded model from /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a6110>: ([Input(value={'Years': 4}, definition=record_features)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value={'Years': 4}, definition=record_features)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 0d5670334b641d7ddb4068443c8d6d4b38a4ae22359c72d1a343ac7197f19e3d
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] prediction_using_model
DEBUG:dffml.MemoryOrchestratorContext:[0d5670334b641d7ddb4068443c8d6d4b38a4ae22359c72d1a343ac7197f19e3d]: dispatch operation: prediction_using_model
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: prediction_using_model
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': {'Years': 4}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'prediction': {'Salary': {'confidence': 1.0, 'value': 50.0}}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['model_predictions']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [model_predictions]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'model_predictions': {'Salary': {'confidence': 1.0, 'value': 50.0}}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.SLRModel:Saved model to /tmp/tmp.tPXahPUaJ4/.cache/dffml/slr/90e67083b95825495dc0d2ec594f021e5aa9a5a6e9ddf30907a54b4d8a53e69f23911a1adb02d8679df84da8cf073721.json
ok
test_docstring (tests.test_docstrings.operation_output_AssociateDefinition) ... Finding tests in AssociateDefinition
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a1ed0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation associate_definition(associate_definition) with base config
DEBUG:dffml.AssociateDefinitionImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a1ed0>: ([Input(value=my favorite value, definition=feed), Input(value=face, definition=output), Input(value=my second favorite value, definition=dead), Input(value=beef, definition=output), Input(value={'feed': 'output'}, definition=associate_spec)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=my favorite value, definition=feed), Input(value=face, definition=output), Input(value=my second favorite value, definition=dead), Input(value=beef, definition=output), Input(value={'feed': 'output'}, definition=associate_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 056ac7f87b2426b8cd79b4a7f1a5433dd29b0729be42731e816d0d71a8b9b289
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: associate_definition
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': {'feed': 'output'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'feed': 'face'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a1ed0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation associate_definition(associate_definition) with base config
DEBUG:dffml.AssociateDefinitionImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a1ed0>: ([Input(value=my favorite value, definition=feed), Input(value=face, definition=output), Input(value=my second favorite value, definition=dead), Input(value=beef, definition=output), Input(value={'dead': 'output'}, definition=associate_spec)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=my favorite value, definition=feed), Input(value=face, definition=output), Input(value=my second favorite value, definition=dead), Input(value=beef, definition=output), Input(value={'dead': 'output'}, definition=associate_spec)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 0e9cb6f5e717c423750ee311af69e375e5473ffe3aa3cdd687a67aeae0a80a76
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: associate_definition
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': {'dead': 'output'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'dead': 'beef'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_output_GetMulti) ... Finding tests in GetMulti
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a1ed0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_multi(get_multi) with base config
DEBUG:dffml.GetMultiImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a1ed0>: ([Input(value=https://github.com/intel/dffml, definition=URL), Input(value=https://github.com/intel/cve-bin-tool, definition=URL)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=https://github.com/intel/dffml, definition=URL), Input(value=https://github.com/intel/cve-bin-tool, definition=URL)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 9199d9eb83065d757862faf418aacd1548e508251e75cc453a670ffbf95723fa
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_multi
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['URL']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetMulti:output spec: [URL]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'URL': ['https://github.com/intel/dffml', 'https://github.com/intel/cve-bin-tool']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_output_GetSingle) ... Finding tests in GetSingle
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a10d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a10d0>: ([Input(value=https://github.com/intel/dffml, definition=URL)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=https://github.com/intel/dffml, definition=URL)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 2b1cac1eef82497d7265deb97deb99f327afb4797e84f6fb6342d20b880cef5f
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['URL']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [URL]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'URL': 'https://github.com/intel/dffml'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.operation_preprocess_literal_eval) ... Finding tests in literal_eval
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf13a1a50>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation literal_eval(literal_eval) with base config
DEBUG:dffml.LiteralEvalImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf13a1a50>: ([Input(value=[1,2,3], definition=InputStr)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=[1,2,3], definition=InputStr)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: a179711f523375a73dc83692c3b8fc967561873e95803ef534a8514094c06fa3
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] literal_eval
DEBUG:dffml.MemoryOrchestratorContext:[a179711f523375a73dc83692c3b8fc967561873e95803ef534a8514094c06fa3]: dispatch operation: literal_eval
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: literal_eval
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'str_to_eval': '[1,2,3]'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'str_after_eval': [1, 2, 3]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['EvaluatedStr']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [EvaluatedStr]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'EvaluatedStr': [1, 2, 3]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_docstring (tests.test_docstrings.record_Record_Record_evaluated) ... Finding tests in Record.evaluated
INFO:dffml.record:Evaluated example {'dead': 'beef', 'feed': 'face'}
INFO:dffml.record:Evaluated example {'new': 'feature'}
ok
test_docstring (tests.test_docstrings.record_Record_Record_feature) ... Finding tests in Record.feature
ok
test_docstring (tests.test_docstrings.record_Record_Record_features) ... Finding tests in Record.features
ok
test_docstring (tests.test_docstrings.record_Record_Record_predicted) ... Finding tests in Record.predicted
ok
test_docstring (tests.test_docstrings.record_Record_Record_prediction) ... Finding tests in Record.prediction
ok
test_docstring (tests.test_docstrings.record_Record_Record_predictions) ... Finding tests in Record.predictions
ok
test_docstring (tests.test_docstrings.source_db_DbSourceContext_DbSourceContext_record) ... Finding tests in DbSourceContext.record
ok
test_docstring (tests.test_docstrings.source_db_DbSourceContext_DbSourceContext_records) ... Finding tests in DbSourceContext.records
ok
test_docstring (tests.test_docstrings.source_db_DbSourceContext_DbSourceContext_update) ... Finding tests in DbSourceContext.update
ok
test_docstring (tests.test_docstrings.source_df_DataFlowSourceContext_DataFlowSourceContext_records) ... Finding tests in DataFlowSourceContext.records
ok
test_docstring (tests.test_docstrings.source_df_DataFlowSourceContext_DataFlowSourceContext_update) ... Finding tests in DataFlowSourceContext.update
ok
test_docstring (tests.test_docstrings.source_memory_MemorySourceContext_MemorySourceContext_record) ... Finding tests in MemorySourceContext.record
ok
test_docstring (tests.test_docstrings.source_memory_MemorySourceContext_MemorySourceContext_records) ... Finding tests in MemorySourceContext.records
ok
test_docstring (tests.test_docstrings.source_memory_MemorySourceContext_MemorySourceContext_update) ... Finding tests in MemorySourceContext.update
ok
test_docstring (tests.test_docstrings.source_source_BaseSourceContext_BaseSourceContext_record) ... Finding tests in BaseSourceContext.record
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 'example', 'features': {'dead': 'beef'}, 'extra': {}}])
ok
test_docstring (tests.test_docstrings.source_source_BaseSourceContext_BaseSourceContext_records) ... Finding tests in BaseSourceContext.records
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 'example', 'features': {'dead': 'beef'}, 'extra': {}}])
ok
test_docstring (tests.test_docstrings.source_source_BaseSourceContext_BaseSourceContext_update) ... Finding tests in BaseSourceContext.update
DEBUG:asyncio:Using selector: EpollSelector
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[])
ok
test_docstring (tests.test_docstrings.feature_feature_Features_Feature) ... Finding tests in Feature
ok
test_docstring (tests.test_docstrings.util_asynctestcase_AsyncExitStackTestCase) ... Finding tests in AsyncExitStackTestCase
ok
test_docstring (tests.test_docstrings.util_asynctestcase_AsyncTestCase) ... Finding tests in AsyncTestCase
ok
test_docstring (tests.test_docstrings.util_asynctestcase_IntegrationCLITestCase) ... Finding tests in IntegrationCLITestCase
ok
test_docstring (tests.test_docstrings.util_data_explore_directories) ... Finding tests in explore_directories
ok
test_docstring (tests.test_docstrings.util_data_parser_helper) ... Finding tests in parser_helper
ok
test_docstring (tests.test_docstrings.util_data_traverse_config_get) ... Finding tests in traverse_config_get
ok
test_docstring (tests.test_docstrings.util_data_traverse_config_set) ... Finding tests in traverse_config_set
ok
test_docstring (tests.test_docstrings.util_data_traverse_get) ... Finding tests in traverse_get
ok
test_docstring (tests.test_docstrings.util_entrypoint_base_entry_point) ... Finding tests in base_entry_point
ok
test_docstring (tests.test_docstrings.util_entrypoint_entrypoint) ... Finding tests in entrypoint
ok
test_docstring (tests.test_docstrings.util_net_cached_download) ... Finding tests in cached_download
DEBUG:asyncio:Using selector: EpollSelector
ERROR
test_docstring (tests.test_docstrings.util_net_cached_download_unpack_archive) ... Finding tests in cached_download_unpack_archive
DEBUG:asyncio:Using selector: EpollSelector
ERROR
test_run (tests.test_cli.TestAccuracy) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmp5brsgdzc/0.8743837286019523', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmp5brsgdzc/0.8743837286019523') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmp5brsgdzc/0.8743837286019523 first open
DEBUG:dffml.JSONSource:/tmp/tmp5brsgdzc/0.8743837286019523 updated
DEBUG:dffml.JSONSource:/tmp/tmp5brsgdzc/0.8743837286019523 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmp5brsgdzc/0.8743837286019523'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.ml.Accuracy:Setting log = 20
DEBUG:dffml.cli.ml.Accuracy:Setting model = <class 'tests.test_cli.FakeModel'>
DEBUG:dffml.cli.ml.Accuracy:Setting sources = [<class 'dffml.base.JSONSource'>]
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmp5brsgdzc/0.8743837286019523', tag='untagged', readwrite=False, allowempty=False)
DEBUG:dffml.FakeModel:FakeConfig(features=[fake(Feature)[<class 'int'>, 1]], predict=fake(Feature)[<class 'int'>, 1], directory='/tmp/tmp.tPXahPUaJ4/.cache/dffml/test_cli/fake')
DEBUG:dffml.cli.ml.Accuracy:Ignored log
DEBUG:dffml.cli.ml.Accuracy:Ignored model
DEBUG:dffml.cli.ml.Accuracy:Ignored sources
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp5brsgdzc/0.8743837286019523'), tag='untagged', readwrite=False, allowempty=False))
DEBUG:dffml.JSONSource:/tmp/tmp5brsgdzc/0.8743837286019523 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp5brsgdzc/0.8743837286019523'), tag='untagged', readwrite=False, allowempty=False)) loaded 10 records
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf12a5ad0>
ok
test_run (tests.test_cli.TestDataflowRunAllRecords) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmpnaxfsw3j/0.29422502264853956', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpnaxfsw3j/0.29422502264853956') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpnaxfsw3j/0.29422502264853956 first open
DEBUG:dffml.JSONSource:/tmp/tmpnaxfsw3j/0.29422502264853956 updated
DEBUG:dffml.JSONSource:/tmp/tmpnaxfsw3j/0.29422502264853956 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpnaxfsw3j/0.29422502264853956'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.JSONSource:FileSourceConfig(filename=PosixPath('/tmp/tmpnaxfsw3j/0.29422502264853956'), tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpnaxfsw3j/0.29422502264853956') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpnaxfsw3j/0.29422502264853956 first open
DEBUG:dffml.JSONSource:/tmp/tmpnaxfsw3j/0.29422502264853956 updated
DEBUG:dffml.JSONSource:/tmp/tmpnaxfsw3j/0.29422502264853956 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpnaxfsw3j/0.29422502264853956'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.cli.dataflow.Create:Setting config = <class 'dffml.configloader.json.JSONConfigLoader'>
DEBUG:dffml.cli.dataflow.Create:Setting log = 20
DEBUG:dffml.cli.dataflow.Create:Setting not_linked = False
DEBUG:dffml.cli.dataflow.Create:Setting operations = ['get_single', 'add', 'mult', 'parse_line']
DEBUG:dffml.cli.dataflow.Create:Setting seed = []
DEBUG:dffml.JSONConfigLoader:BaseConfig()
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting caching = []
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting config = None
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting dataflow = '/tmp/tmp6dh9zu59.json'
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting inputs = [(['result'], 'get_single_spec')]
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting log = 20
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting no_echo = False
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting no_strict = False
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting no_update = False
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting orchestrator = <class 'dffml.df.memory.MemoryOrchestrator'>
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting record_def = 'calc_string'
DEBUG:dffml.cli.dataflow.RunAllRecords:Setting sources = [<class 'dffml.base.JSONSource'>]
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpnaxfsw3j/0.29422502264853956', tag='untagged', readwrite=False, allowempty=False)
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.JSONConfigLoader:BaseConfig()
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpnaxfsw3j/0.29422502264853956'), tag='untagged', readwrite=False, allowempty=False))
DEBUG:dffml.JSONSource:/tmp/tmpnaxfsw3j/0.29422502264853956 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpnaxfsw3j/0.29422502264853956'), tag='untagged', readwrite=False, allowempty=False)) loaded 2 records
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf12bed10>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf1410990>
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf12bed10>: (<dffml.df.memory.MemoryInputSet object at 0x7f6cf1410ed0>, <dffml.df.memory.MemoryInputSet object at 0x7f6cf1410cd0>)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf1410ed0>
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf1410cd0>
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: add 40 and 2
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'add 40 and 2'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': True, 'mult': False, 'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] add
DEBUG:dffml.MemoryOrchestratorContext:[add 40 and 2]: dispatch operation: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: add
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [40, 2]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_add': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'sum': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 42}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
INFO:dffml.record:Evaluated multiply 42 and 10 {'result': 420}
DEBUG:dffml.source:Updating 'multiply 42 and 10': {'key': 'multiply 42 and 10', 'features': {'result': 420}, 'last_updated': '2020-05-28T12:51:56Z', 'extra': {}}
INFO:dffml.record:Evaluated add 40 and 2 {'result': 42}
DEBUG:dffml.source:Updating 'add 40 and 2': {'key': 'add 40 and 2', 'features': {'result': 42}, 'last_updated': '2020-05-28T12:51:56Z', 'extra': {}}
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
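
For reference, the TestDataflowRunAllRecords trace above parses each record's line, dispatches it to either add or mult, and collects the answer through get_single. A minimal plain-Python sketch of that traced logic, reconstructed from the Inputs/Outputs lines above (not DFFML's actual operation implementations):

    # Sketch of the calc_string flow: parse "add 40 and 2" or
    # "multiply 42 and 10", run the matching arithmetic, return the result.
    def parse_line(line):
        words = line.split()
        numbers = [int(word) for word in words if word.isdigit()]
        return {"add": words[0] == "add", "mult": words[0] == "multiply", "numbers": numbers}

    def add(numbers):
        return {"sum": sum(numbers)}

    def mult(numbers):
        product = 1
        for number in numbers:
            product *= number
        return {"product": product}

    def run(line):
        parsed = parse_line(line)
        if parsed["add"]:
            return {"result": add(parsed["numbers"])["sum"]}
        return {"result": mult(parsed["numbers"])["product"]}

    assert run("add 40 and 2") == {"result": 42}
    assert run("multiply 42 and 10") == {"result": 420}
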
test_run (tests.test_cli.TestDataflowRunRecordSet) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmpv1e6do36/0.5234686363192922', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpv1e6do36/0.5234686363192922') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpv1e6do36/0.5234686363192922 first open
DEBUG:dffml.JSONSource:/tmp/tmpv1e6do36/0.5234686363192922 updated
DEBUG:dffml.JSONSource:/tmp/tmpv1e6do36/0.5234686363192922 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpv1e6do36/0.5234686363192922'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.JSONSource:FileSourceConfig(filename=PosixPath('/tmp/tmpv1e6do36/0.5234686363192922'), tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpv1e6do36/0.5234686363192922') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpv1e6do36/0.5234686363192922 first open
DEBUG:dffml.JSONSource:/tmp/tmpv1e6do36/0.5234686363192922 updated
DEBUG:dffml.JSONSource:/tmp/tmpv1e6do36/0.5234686363192922 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpv1e6do36/0.5234686363192922'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.cli.dataflow.Create:Setting config = <class 'dffml.configloader.json.JSONConfigLoader'>
DEBUG:dffml.cli.dataflow.Create:Setting log = 20
DEBUG:dffml.cli.dataflow.Create:Setting not_linked = False
DEBUG:dffml.cli.dataflow.Create:Setting operations = ['get_single', 'add', 'mult', 'parse_line']
DEBUG:dffml.cli.dataflow.Create:Setting seed = []
DEBUG:dffml.JSONConfigLoader:BaseConfig()
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting caching = []
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting config = None
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting dataflow = '/tmp/tmppf8n8gkt.json'
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting inputs = [(['result'], 'get_single_spec')]
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting keys = ['multiply 42 and 10']
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting log = 20
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting no_echo = False
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting no_strict = False
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting no_update = False
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting orchestrator = <class 'dffml.df.memory.MemoryOrchestrator'>
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting record_def = 'calc_string'
DEBUG:dffml.cli.dataflow.RunRecordSet:Setting sources = [<class 'dffml.base.JSONSource'>]
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpv1e6do36/0.5234686363192922', tag='untagged', readwrite=False, allowempty=False)
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.JSONConfigLoader:BaseConfig()
DEBUG:dffml.util.AsyncContextManagerList.SubsetSources:Entering: JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpv1e6do36/0.5234686363192922'), tag='untagged', readwrite=False, allowempty=False))
DEBUG:dffml.JSONSource:/tmp/tmpv1e6do36/0.5234686363192922 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpv1e6do36/0.5234686363192922'), tag='untagged', readwrite=False, allowempty=False)) loaded 2 records
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf12b61d0>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation add(add) with base config
DEBUG:dffml.AddImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation mult(mult) with base config
DEBUG:dffml.MultImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation parse_line(parse_line) with base config
DEBUG:dffml.ParseLineImplementation:BaseConfig()
DEBUG:dffml.util.AsyncContextManagerListContext.ValidationSourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf12befd0>
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf12b61d0>: (<dffml.df.memory.MemoryInputSet object at 0x7f6cf13a3f50>,)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: <dffml.df.memory.MemoryInputSet object at 0x7f6cf13a3f50>
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: multiply 42 and 10
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] parse_line
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: parse_line
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'line': 'multiply 42 and 10'}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'add': False, 'mult': True, 'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] mult
DEBUG:dffml.MemoryOrchestratorContext:[multiply 42 and 10]: dispatch operation: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: mult
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'numbers': [42, 10]}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {'is_mult': True}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'product': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': ['result']}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: [result]
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'result': 420}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
INFO:dffml.record:Evaluated multiply 42 and 10 {'result': 420}
DEBUG:dffml.source:Updating 'multiply 42 and 10': {'key': 'multiply 42 and 10', 'features': {'result': 420}, 'last_updated': '2020-05-28T12:51:56Z', 'extra': {}}
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
test_run (tests.test_cli.TestListRecords) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmp0x6wbqs_/0.695683775355966', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmp0x6wbqs_/0.695683775355966') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmp0x6wbqs_/0.695683775355966 first open
DEBUG:dffml.JSONSource:/tmp/tmp0x6wbqs_/0.695683775355966 updated
DEBUG:dffml.JSONSource:/tmp/tmp0x6wbqs_/0.695683775355966 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmp0x6wbqs_/0.695683775355966'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.list.ListRecords:Setting log = 20
DEBUG:dffml.cli.list.ListRecords:Setting sources = [<class 'dffml.base.JSONSource'>]
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmp0x6wbqs_/0.695683775355966', tag='untagged', readwrite=True, allowempty=False)
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp0x6wbqs_/0.695683775355966'), tag='untagged', readwrite=True, allowempty=False))
DEBUG:dffml.JSONSource:/tmp/tmp0x6wbqs_/0.695683775355966 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp0x6wbqs_/0.695683775355966'), tag='untagged', readwrite=True, allowempty=False)) loaded 10 records
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf04826d0>
DEBUG:dffml.JSONSource:/tmp/tmp0x6wbqs_/0.695683775355966 updated
DEBUG:dffml.JSONSource:/tmp/tmp0x6wbqs_/0.695683775355966 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp0x6wbqs_/0.695683775355966'), tag='untagged', readwrite=True, allowempty=False)) saved 10 records
ok
test_csv_tag (tests.test_cli.TestMerge) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmp25rj33vc/0.04679075492976437', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmp25rj33vc/0.04679075492976437') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmp25rj33vc/0.04679075492976437 first open
DEBUG:dffml.JSONSource:/tmp/tmp25rj33vc/0.04679075492976437 updated
DEBUG:dffml.JSONSource:/tmp/tmp25rj33vc/0.04679075492976437 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmp25rj33vc/0.04679075492976437'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.cli.Merge:Setting dest = <class 'dffml.base.CSVSource'>
DEBUG:dffml.cli.cli.Merge:Setting log = 20
DEBUG:dffml.cli.cli.Merge:Setting src = <class 'dffml.base.JSONSource'>
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmp25rj33vc/0.04679075492976437', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:/tmp/tmp25rj33vc/0.04679075492976437 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp25rj33vc/0.04679075492976437'), tag='untagged', readwrite=True, allowempty=True)) loaded 10 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpiko_lgk2/0.3552242178679834', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpiko_lgk2/0.3552242178679834') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpiko_lgk2/0.3552242178679834 first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag']
DEBUG:dffml.CSVSource:/tmp/tmpiko_lgk2/0.3552242178679834 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpiko_lgk2/0.3552242178679834'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 10 records
DEBUG:dffml.JSONSource:/tmp/tmp25rj33vc/0.04679075492976437 updated
DEBUG:dffml.JSONSource:/tmp/tmp25rj33vc/0.04679075492976437 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp25rj33vc/0.04679075492976437'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.cli.Merge:Setting dest = <class 'dffml.base.CSVSource'>
DEBUG:dffml.cli.cli.Merge:Setting log = 20
DEBUG:dffml.cli.cli.Merge:Setting src = <class 'dffml.base.CSVSource'>
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpiko_lgk2/0.3552242178679834', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:/tmp/tmpiko_lgk2/0.3552242178679834 first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpiko_lgk2/0.3552242178679834'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 10 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpiko_lgk2/0.3552242178679834', tag='sometag', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:/tmp/tmpiko_lgk2/0.3552242178679834 already open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpiko_lgk2/0.3552242178679834'), tag='sometag', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 0 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag']
DEBUG:dffml.CSVSource:/tmp/tmpiko_lgk2/0.3552242178679834 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpiko_lgk2/0.3552242178679834'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 10 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpiko_lgk2/0.3552242178679834', tag='untagged', readwrite=False, allowempty=False, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:/tmp/tmpiko_lgk2/0.3552242178679834 first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpiko_lgk2/0.3552242178679834'), tag='untagged', readwrite=False, allowempty=False, key='key', tagcol='tag', loadfiles=None)) loaded 10 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpiko_lgk2/0.3552242178679834', tag='sometag', readwrite=False, allowempty=False, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:/tmp/tmpiko_lgk2/0.3552242178679834 already open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpiko_lgk2/0.3552242178679834'), tag='sometag', readwrite=False, allowempty=False, key='key', tagcol='tag', loadfiles=None)) loaded 10 records
ok
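
The test_csv_tag trace above copies records from a JSON source into a CSV source whose header starts with ['key', 'tag'], then reloads them under the 'untagged' and 'sometag' tags. A rough stand-alone approximation of the first copy step using only json/csv; the on-disk layout {tag: {key: record}} is an assumption inferred from the tagged loads above, not taken from the dffml merge code:

    import csv
    import json

    def merge_json_to_csv(json_path, csv_path, tag="untagged"):
        # Assumed JSON source layout: {tag: {key: {"features": {...}}}}
        with open(json_path) as fd:
            records = json.load(fd).get(tag, {})
        feature_names = sorted({name for record in records.values()
                                for name in record.get("features", {})})
        with open(csv_path, "w", newline="") as fd:
            writer = csv.DictWriter(fd, fieldnames=["key", "tag"] + feature_names)
            writer.writeheader()
            for key, record in records.items():
                writer.writerow({"key": key, "tag": tag, **record.get("features", {})})
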
test_json_tag (tests.test_cli.TestMerge) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmpvb77m9ug/0.1349380234645381', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpvb77m9ug/0.1349380234645381') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 first open
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 updated
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpvb77m9ug/0.1349380234645381'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.cli.Merge:Setting dest = <class 'dffml.base.JSONSource'>
DEBUG:dffml.cli.cli.Merge:Setting log = 20
DEBUG:dffml.cli.cli.Merge:Setting src = <class 'dffml.base.JSONSource'>
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpvb77m9ug/0.1349380234645381', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpvb77m9ug/0.1349380234645381'), tag='untagged', readwrite=True, allowempty=True)) loaded 10 records
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpvb77m9ug/0.1349380234645381', tag='sometag', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 already open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpvb77m9ug/0.1349380234645381'), tag='sometag', readwrite=True, allowempty=True)) loaded 0 records
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 updated
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpvb77m9ug/0.1349380234645381'), tag='sometag', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 updated
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpvb77m9ug/0.1349380234645381'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmpvb77m9ug/0.1349380234645381', tag='untagged', readwrite=False, allowempty=False)
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 first open
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpvb77m9ug/0.1349380234645381'), tag='untagged', readwrite=False, allowempty=False)) loaded 10 records
DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmpvb77m9ug/0.1349380234645381', tag='sometag', readwrite=False, allowempty=False)
DEBUG:dffml.JSONSource:/tmp/tmpvb77m9ug/0.1349380234645381 already open
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpvb77m9ug/0.1349380234645381'), tag='sometag', readwrite=False, allowempty=False)) loaded 10 records
ok
test_json_to_csv (tests.test_cli.TestMerge) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmpk2zfjd5q/0.0418184287735639', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpk2zfjd5q/0.0418184287735639') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpk2zfjd5q/0.0418184287735639 first open
DEBUG:dffml.JSONSource:/tmp/tmpk2zfjd5q/0.0418184287735639 updated
DEBUG:dffml.JSONSource:/tmp/tmpk2zfjd5q/0.0418184287735639 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpk2zfjd5q/0.0418184287735639'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.cli.Merge:Setting dest = <class 'dffml.base.CSVSource'>
DEBUG:dffml.cli.cli.Merge:Setting log = 20
DEBUG:dffml.cli.cli.Merge:Setting src = <class 'dffml.base.JSONSource'>
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpk2zfjd5q/0.0418184287735639', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:/tmp/tmpk2zfjd5q/0.0418184287735639 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpk2zfjd5q/0.0418184287735639'), tag='untagged', readwrite=True, allowempty=True)) loaded 10 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpt9jbaw9k/0.01286842338693539', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpt9jbaw9k/0.01286842338693539') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpt9jbaw9k/0.01286842338693539 first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag']
DEBUG:dffml.CSVSource:/tmp/tmpt9jbaw9k/0.01286842338693539 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpt9jbaw9k/0.01286842338693539'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 10 records
DEBUG:dffml.JSONSource:/tmp/tmpk2zfjd5q/0.0418184287735639 updated
DEBUG:dffml.JSONSource:/tmp/tmpk2zfjd5q/0.0418184287735639 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpk2zfjd5q/0.0418184287735639'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
ok
test_all (tests.test_cli.TestPredict) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmpavil7y4c/0.02791846920676988', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpavil7y4c/0.02791846920676988') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpavil7y4c/0.02791846920676988 first open
DEBUG:dffml.JSONSource:/tmp/tmpavil7y4c/0.02791846920676988 updated
DEBUG:dffml.JSONSource:/tmp/tmpavil7y4c/0.02791846920676988 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpavil7y4c/0.02791846920676988'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.ml.PredictAll:Setting log = 20
DEBUG:dffml.cli.ml.PredictAll:Setting model = <class 'tests.test_cli.FakeModel'>
DEBUG:dffml.cli.ml.PredictAll:Setting sources = [<class 'dffml.base.JSONSource'>]
DEBUG:dffml.cli.ml.PredictAll:Setting update = False
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpavil7y4c/0.02791846920676988', tag='untagged', readwrite=False, allowempty=False)
DEBUG:dffml.FakeModel:FakeConfig(features=[fake(Feature)[<class 'float'>, [10, 0]]], predict=fake(Feature)[<class 'int'>, 1], directory='/tmp/tmp.tPXahPUaJ4/.cache/dffml/test_cli/fake')
DEBUG:dffml.cli.ml.PredictAll:Ignored log
DEBUG:dffml.cli.ml.PredictAll:Ignored model
DEBUG:dffml.cli.ml.PredictAll:Ignored sources
DEBUG:dffml.cli.ml.PredictAll:Ignored update
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpavil7y4c/0.02791846920676988'), tag='untagged', readwrite=False, allowempty=False))
DEBUG:dffml.JSONSource:/tmp/tmpavil7y4c/0.02791846920676988 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpavil7y4c/0.02791846920676988'), tag='untagged', readwrite=False, allowempty=False)) loaded 10 records
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf12afed0>
ok
test_record (tests.test_cli.TestPredict) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmp6otyo4vi/0.06725536351972738', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmp6otyo4vi/0.06725536351972738') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmp6otyo4vi/0.06725536351972738 first open
DEBUG:dffml.JSONSource:/tmp/tmp6otyo4vi/0.06725536351972738 updated
DEBUG:dffml.JSONSource:/tmp/tmp6otyo4vi/0.06725536351972738 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmp6otyo4vi/0.06725536351972738'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.ml.PredictRecord:Setting keys = ['0.7469570629605888', '0.8110212311052328', '0.6140330881430139', '0.9177135449749785', '0.36871327638140095']
DEBUG:dffml.cli.ml.PredictRecord:Setting log = 20
DEBUG:dffml.cli.ml.PredictRecord:Setting model = <class 'tests.test_cli.FakeModel'>
DEBUG:dffml.cli.ml.PredictRecord:Setting sources = [<class 'dffml.base.JSONSource'>]
DEBUG:dffml.cli.ml.PredictRecord:Setting update = False
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmp6otyo4vi/0.06725536351972738', tag='untagged', readwrite=False, allowempty=False)
DEBUG:dffml.FakeModel:FakeConfig(features=[fake(Feature)[<class 'int'>, 1]], predict=fake(Feature)[<class 'int'>, 1], directory='/tmp/tmp.tPXahPUaJ4/.cache/dffml/test_cli/fake')
DEBUG:dffml.cli.ml.PredictRecord:Ignored keys
DEBUG:dffml.cli.ml.PredictRecord:Ignored log
DEBUG:dffml.cli.ml.PredictRecord:Ignored model
DEBUG:dffml.cli.ml.PredictRecord:Ignored sources
DEBUG:dffml.cli.ml.PredictRecord:Ignored update
DEBUG:dffml.util.AsyncContextManagerList.SubsetSources:Entering: JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp6otyo4vi/0.06725536351972738'), tag='untagged', readwrite=False, allowempty=False))
DEBUG:dffml.JSONSource:/tmp/tmp6otyo4vi/0.06725536351972738 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmp6otyo4vi/0.06725536351972738'), tag='untagged', readwrite=False, allowempty=False)) loaded 10 records
DEBUG:dffml.util.AsyncContextManagerListContext.ValidationSourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf12d3cd0>
ok
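
test_record above only evaluates the records named in keys (via SubsetSources), rather than every record in the source. A hedged illustration of that filtering step; records and the predict callable are placeholders, not DFFML objects:

    # Run a prediction only for the requested keys, skipping keys not in the source.
    def predict_subset(records, keys, predict):
        for key in keys:
            record = records.get(key)
            if record is not None:
                yield key, predict(record["features"])

    records = {"a": {"features": {"fake": 1}}, "b": {"features": {"fake": 2}}}
    for key, prediction in predict_subset(records, ["a"], lambda features: features["fake"] * 2):
        print(key, prediction)  # a 2
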
test_run (tests.test_cli.TestTrain) ... DEBUG:dffml.JSONSource:FileSourceConfig(filename='/tmp/tmpo040i01l/0.34690259533030166', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpo040i01l/0.34690259533030166') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpo040i01l/0.34690259533030166 first open
DEBUG:dffml.JSONSource:/tmp/tmpo040i01l/0.34690259533030166 updated
DEBUG:dffml.JSONSource:/tmp/tmpo040i01l/0.34690259533030166 written
DEBUG:dffml.source.json:JSONSource(FileSourceConfig(filename=PosixPath('/tmp/tmpo040i01l/0.34690259533030166'), tag='untagged', readwrite=True, allowempty=True)) saved 10 records
DEBUG:dffml.cli.ml.Train:Setting log = 20
DEBUG:dffml.cli.ml.Train:Setting model = <class 'tests.test_cli.FakeModel'>
DEBUG:dffml.cli.ml.Train:Setting sources = [<class 'dffml.base.JSONSource'>]
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpo040i01l/0.34690259533030166', tag='untagged', readwrite=False, allowempty=False)
DEBUG:dffml.FakeModel:FakeConfig(features=[fake(Feature)[<class 'int'>, 1]], predict=fake(Feature)[<class 'int'>, 1], directory='/tmp/tmp.tPXahPUaJ4/.cache/dffml/test_cli/fake')
DEBUG:dffml.cli.ml.Train:Ignored log
DEBUG:dffml.cli.ml.Train:Ignored model
DEBUG:dffml.cli.ml.Train:Ignored sources
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpo040i01l/0.34690259533030166'), tag='untagged', readwrite=False, allowempty=False))
DEBUG:dffml.JSONSource:/tmp/tmpo040i01l/0.34690259533030166 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpo040i01l/0.34690259533030166'), tag='untagged', readwrite=False, allowempty=False)) loaded 10 records
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf14d3ed0>
ok
test_args (tests.source.test_file.TestFileSource) ... ok
test_close (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.FakeFileSource:PosixPath('testfile') is not a file, initializing memory to empty dict
ok
test_close_bz2 (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.bz2', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.FakeFileSource:PosixPath('testfile.bz2') is not a file, initializing memory to empty dict
ok
test_close_gz (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.gz', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.FakeFileSource:PosixPath('testfile.gz') is not a file, initializing memory to empty dict
ok
test_close_lzma (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.lzma', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.FakeFileSource:PosixPath('testfile.lzma') is not a file, initializing memory to empty dict
ok
test_close_readonly (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile', tag='untagged', readwrite=False, allowempty=True)
DEBUG:dffml.FakeFileSource:PosixPath('testfile') is not a file, initializing memory to empty dict
ok
test_close_xz (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.xz', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.FakeFileSource:PosixPath('testfile.xz') is not a file, initializing memory to empty dict
ok
test_close_zip (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.zip', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.FakeFileSource:PosixPath('testfile.zip') is not a file, initializing memory to empty dict
ok
test_config_readonly_default (tests.source.test_file.TestFileSource) ... ok
test_config_readonly_set (tests.source.test_file.TestFileSource) ... ok
test_open (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile', tag='untagged', readwrite=False, allowempty=True)
ok
test_open_bz2 (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.bz2', tag='untagged', readwrite=False, allowempty=True)
ok
test_open_gz (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.gz', tag='untagged', readwrite=False, allowempty=True)
ok
test_open_lzma (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.lzma', tag='untagged', readwrite=False, allowempty=True)
ok
test_open_no_file (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile', tag='untagged', readwrite=False, allowempty=True)
DEBUG:dffml.FakeFileSource:PosixPath('testfile') is not a file, initializing memory to empty dict
ok
test_open_xz (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.xz', tag='untagged', readwrite=False, allowempty=True)
ok
test_open_zip (tests.source.test_file.TestFileSource) ... DEBUG:dffml.FakeFileSource:FileSourceConfig(filename='testfile.zip', tag='untagged', readwrite=False, allowempty=True)
ok
test_ini (tests.source.test_ini.TestINISource) ... DEBUG:dffml.INISource:INISourceConfig(filename='/tmp/tmp6xndo0mi/testfile.ini', readwrite=True, allowempty=True)
DEBUG:dffml.INISource:PosixPath('/tmp/tmp6xndo0mi/testfile.ini') is not a file, initializing memory to empty dict
DEBUG:dffml.INISource:INISource(INISourceConfig(filename=PosixPath('/tmp/tmp6xndo0mi/testfile.ini'), readwrite=True, allowempty=True)) saved 2 sections
DEBUG:dffml.INISource:INISource(INISourceConfig(filename=PosixPath('/tmp/tmp6xndo0mi/testfile.ini'), readwrite=True, allowempty=True)) loaded 2 sections
DEBUG:dffml.INISource:INISource(INISourceConfig(filename=PosixPath('/tmp/tmp6xndo0mi/testfile.ini'), readwrite=True, allowempty=True)) saved 2 sections
ok
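
The INI source round trip above saves and reloads 2 sections, with each record stored as an INI section and its features as keys. A small configparser sketch of that idea (an approximation of the behaviour, not the INISource implementation; record names and feature values are made up):

    import configparser

    records = {"record0": {"years": "1", "trust": "0.1"},
               "record1": {"years": "2", "trust": "0.2"}}

    # Save: one section per record.
    config = configparser.ConfigParser()
    for key, features in records.items():
        config[key] = features
    with open("testfile.ini", "w") as fd:
        config.write(fd)

    # Load: sections come back as records.
    reloaded = configparser.ConfigParser()
    reloaded.read("testfile.ini")
    assert set(reloaded.sections()) == set(records)
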
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': '0', 'features': {'Years': 0, 'Expertise': 1, 'Trust': 0.1, 'Salary': 10}, 'extra': {}}, {'key': '1', 'features': {'Years': 1, 'Expertise': 3, 'Trust': 0.2, 'Salary': 20}, 'extra': {}}, {'key': '2', 'features': {'Years': 2, 'Expertise': 5, 'Trust': 0.3, 'Salary': 30}, 'extra': {}}, {'key': '3', 'features': {'Years': 3, 'Expertise': 7, 'Trust': 0.4, 'Salary': 40}, 'extra': {}}])
test_records (tests.source.test_df.TestDataFlowSource) ... DEBUG:dffml.DataFlowSource:DataFlowSourceConfig(source=[MemorySource(MemorySourceConfig(records=[{'key': '0', 'features': {'Years': 0, 'Expertise': 1, 'Trust': 0.1, 'Salary': 10}, 'extra': {}}, {'key': '1', 'features': {'Years': 1, 'Expertise': 3, 'Trust': 0.2, 'Salary': 20}, 'extra': {}}, {'key': '2', 'features': {'Years': 2, 'Expertise': 5, 'Trust': 0.3, 'Salary': 30}, 'extra': {}}, {'key': '3', 'features': {'Years': 3, 'Expertise': 7, 'Trust': 0.4, 'Salary': 40}, 'extra': {}}]))], dataflow=<dffml.df.types.DataFlow object at 0x7f6c...
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': '0', 'features': {'Years': 0, 'Expertise': 1, 'Trust': 0.1, 'Salary': 10}, 'extra': {}}, {'key': '1', 'features': {'Years': 1, 'Expertise': 3, 'Trust': 0.2, 'Salary': 20}, 'extra': {}}, {'key': '2', 'features': {'Years': 2, 'Expertise': 5, 'Trust': 0.3, 'Salary': 30}, 'extra': {}}, {'key': '3', 'features': {'Years': 3, 'Expertise': 7, 'Trust': 0.4, 'Salary': 40}, 'extra': {}}]))
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf120de10>
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1714d50>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation edit_feature(edit_feature) with base config
DEBUG:dffml.EditFeatureImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation associate_definition(associate_definition) with base config
DEBUG:dffml.AssociateDefinitionImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1714d50>: ([Input(value=0, definition=Years), Input(value=1, definition=Expertise), Input(value=0.1, definition=Trust), Input(value=10, definition=Salary)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=0, definition=Years), Input(value=1, definition=Expertise), Input(value=0.1, definition=Trust), Input(value=10, definition=Salary)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: c564bd6431b112a8adc967da54b1ff0e3935fb4872a9205ee953668f71e81c67
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[c564bd6431b112a8adc967da54b1ff0e3935fb4872a9205ee953668f71e81c67]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[c564bd6431b112a8adc967da54b1ff0e3935fb4872a9205ee953668f71e81c67]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[c564bd6431b112a8adc967da54b1ff0e3935fb4872a9205ee953668f71e81c67]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[c564bd6431b112a8adc967da54b1ff0e3935fb4872a9205ee953668f71e81c67]: dispatch operation: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 0}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 0}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 10}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 100}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 10}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 0.1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 1.0}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: associate_definition
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': {'Years': 'feature_data', 'Expertise': 'feature_data', 'Trust': 'feature_data', 'Salary': 'feature_data'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'Years': 0, 'Expertise': 10, 'Trust': 1.0, 'Salary': 100}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
INFO:dffml.record:Evaluated 0 {'Years': 0, 'Expertise': 10, 'Trust': 1.0, 'Salary': 100}
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
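
The TestDataFlowSource trace for record 0 shows every feature value multiplied by 10 (edit_feature) and the results regrouped per feature name (associate_definition). A plain-Python restatement of that transform, matching the Outputs logged above:

    def edit_feature(value):
        # Traced behaviour: 0 -> 0, 1 -> 10, 0.1 -> 1.0, 10 -> 100.
        return value * 10

    def evaluate(features):
        return {name: edit_feature(value) for name, value in features.items()}

    assert evaluate({"Years": 0, "Expertise": 1, "Trust": 0.1, "Salary": 10}) == {
        "Years": 0, "Expertise": 10, "Trust": 1.0, "Salary": 100}
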
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1714d50>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation edit_feature(edit_feature) with base config
DEBUG:dffml.EditFeatureImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation associate_definition(associate_definition) with base config
DEBUG:dffml.AssociateDefinitionImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1714d50>: ([Input(value=1, definition=Years), Input(value=3, definition=Expertise), Input(value=0.2, definition=Trust), Input(value=20, definition=Salary)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=1, definition=Years), Input(value=3, definition=Expertise), Input(value=0.2, definition=Trust), Input(value=20, definition=Salary)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 577e24fe0f3a630b805e0f3f2abd26a7d697cd3156512396d0a937ae3795c108
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[577e24fe0f3a630b805e0f3f2abd26a7d697cd3156512396d0a937ae3795c108]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[577e24fe0f3a630b805e0f3f2abd26a7d697cd3156512396d0a937ae3795c108]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[577e24fe0f3a630b805e0f3f2abd26a7d697cd3156512396d0a937ae3795c108]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[577e24fe0f3a630b805e0f3f2abd26a7d697cd3156512396d0a937ae3795c108]: dispatch operation: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 0.2}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 2.0}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 1}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 10}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 3}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 30}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 20}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 200}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: associate_definition
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': {'Years': 'feature_data', 'Expertise': 'feature_data', 'Trust': 'feature_data', 'Salary': 'feature_data'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'Years': 10, 'Expertise': 30, 'Trust': 2.0, 'Salary': 200}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
INFO:dffml.record:Evaluated 1 {'Years': 10, 'Expertise': 30, 'Trust': 2.0, 'Salary': 200}
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1714d50>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation edit_feature(edit_feature) with base config
DEBUG:dffml.EditFeatureImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation associate_definition(associate_definition) with base config
DEBUG:dffml.AssociateDefinitionImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1714d50>: ([Input(value=2, definition=Years), Input(value=5, definition=Expertise), Input(value=0.3, definition=Trust), Input(value=30, definition=Salary)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=2, definition=Years), Input(value=5, definition=Expertise), Input(value=0.3, definition=Trust), Input(value=30, definition=Salary)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 98d9c51f39d1e9a1ca0440511400ed26582e770662263ddebb9f25f118e0eca8
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[98d9c51f39d1e9a1ca0440511400ed26582e770662263ddebb9f25f118e0eca8]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[98d9c51f39d1e9a1ca0440511400ed26582e770662263ddebb9f25f118e0eca8]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[98d9c51f39d1e9a1ca0440511400ed26582e770662263ddebb9f25f118e0eca8]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[98d9c51f39d1e9a1ca0440511400ed26582e770662263ddebb9f25f118e0eca8]: dispatch operation: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 0.3}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 3.0}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 2}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 20}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 30}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 300}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 5}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 50}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: associate_definition
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': {'Years': 'feature_data', 'Expertise': 'feature_data', 'Trust': 'feature_data', 'Salary': 'feature_data'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'Years': 20, 'Expertise': 50, 'Trust': 3.0, 'Salary': 300}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
INFO:dffml.record:Evaluated 2 {'Years': 20, 'Expertise': 50, 'Trust': 3.0, 'Salary': 300}
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6cf1714d50>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation edit_feature(edit_feature) with base config
DEBUG:dffml.EditFeatureImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation associate_definition(associate_definition) with base config
DEBUG:dffml.AssociateDefinitionImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6cf1714d50>: ([Input(value=3, definition=Years), Input(value=7, definition=Expertise), Input(value=0.4, definition=Trust), Input(value=40, definition=Salary)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=3, definition=Years), Input(value=7, definition=Expertise), Input(value=0.4, definition=Trust), Input(value=40, definition=Salary)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 844fc4b2d439e6ed331a01e2e47ac20d29b69c227f42a63c0265ddd2cc7a8e2c
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[844fc4b2d439e6ed331a01e2e47ac20d29b69c227f42a63c0265ddd2cc7a8e2c]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[844fc4b2d439e6ed331a01e2e47ac20d29b69c227f42a63c0265ddd2cc7a8e2c]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[844fc4b2d439e6ed331a01e2e47ac20d29b69c227f42a63c0265ddd2cc7a8e2c]: dispatch operation: edit_feature
DEBUG:dffml.util.concurrently:[<Task finished coro=<MemoryRedundancyCheckerContext._take() done, defined at /usr/src/dffml/dffml/df/memory.py:710> result=True>] done
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] edit_feature
DEBUG:dffml.MemoryOrchestratorContext:[844fc4b2d439e6ed331a01e2e47ac20d29b69c227f42a63c0265ddd2cc7a8e2c]: dispatch operation: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 3}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 30}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 40}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 400}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 7}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 70}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: edit_feature
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'features': 0.4}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'updated_features': 4.0}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: associate_definition
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': {'Years': 'feature_data', 'Expertise': 'feature_data', 'Trust': 'feature_data', 'Salary': 'feature_data'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {'Years': 30, 'Expertise': 70, 'Trust': 4.0, 'Salary': 400}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
INFO:dffml.record:Evaluated 3 {'Years': 30, 'Expertise': 70, 'Trust': 4.0, 'Salary': 400}
DEBUG:dffml.MemoryOrchestratorContext:ctx.outstanding: -1
ok
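(Note: the dataflow run above feeds each seed Input through edit_feature and then associate_definition. Going only by the inputs and outputs visible in the log, edit_feature multiplies each feature value by 10 and associate_definition maps each result back to its feature name. A minimal plain-Python sketch of that behaviour, for illustration only; it is not the actual test operation code:

    # Reproduce the transformation logged for record 2, assuming edit_feature
    # simply multiplies each value by 10 (2 -> 20, 0.3 -> 3.0, 30 -> 300).
    seed = {"Years": 2, "Expertise": 5, "Trust": 0.3, "Salary": 30}

    def edit_feature(value):
        # Hypothetical stand-in for the edit_feature operation seen in the log.
        return value * 10

    # associate_definition then pairs each feature name with its edited value.
    evaluated = {name: edit_feature(value) for name, value in seed.items()}
    print(evaluated)  # {'Years': 20, 'Expertise': 50, 'Trust': 3.0, 'Salary': 300}
)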
test_tag (tests.source.test_json.TestJSONSource) ... DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpxr99oop9/0.015731087681827405', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpxr99oop9/0.015731087681827405', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpxr99oop9/0.015731087681827405') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 first open
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpxr99oop9/0.015731087681827405') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 already open
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 updated
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpxr99oop9/0.015731087681827405'), tag='sometag', readwrite=True, allowempty=True)) saved 1 records
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 updated
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpxr99oop9/0.015731087681827405'), tag='untagged', readwrite=True, allowempty=True)) saved 1 records
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpxr99oop9/0.015731087681827405'), tag='untagged', readwrite=True, allowempty=True)) loaded 1 records
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 already open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpxr99oop9/0.015731087681827405'), tag='sometag', readwrite=True, allowempty=True)) loaded 1 records
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 updated
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpxr99oop9/0.015731087681827405'), tag='sometag', readwrite=True, allowempty=True)) saved 1 records
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 updated
DEBUG:dffml.JSONSource:/tmp/tmpxr99oop9/0.015731087681827405 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpxr99oop9/0.015731087681827405'), tag='untagged', readwrite=True, allowempty=True)) saved 1 records
ok
test_update (tests.source.test_json.TestJSONSource) ... DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpj_9it8x7/0.48163963308143976', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpj_9it8x7/0.48163963308143976') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.48163963308143976 first open
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.48163963308143976 updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.48163963308143976 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.48163963308143976'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.48163963308143976 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.48163963308143976'), tag='untagged', readwrite=True, allowempty=True)) loaded 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.48163963308143976 updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.48163963308143976 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.48163963308143976'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpj_9it8x7/0.8472380623393255.xz', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpj_9it8x7/0.8472380623393255.xz') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.8472380623393255.xz first open
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.8472380623393255.xz updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.8472380623393255.xz written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.8472380623393255.xz'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.8472380623393255.xz first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.8472380623393255.xz'), tag='untagged', readwrite=True, allowempty=True)) loaded 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.8472380623393255.xz updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.8472380623393255.xz written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.8472380623393255.xz'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpj_9it8x7/0.9696702057785735.gz', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpj_9it8x7/0.9696702057785735.gz') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.9696702057785735.gz first open
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.9696702057785735.gz updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.9696702057785735.gz written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.9696702057785735.gz'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.9696702057785735.gz first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.9696702057785735.gz'), tag='untagged', readwrite=True, allowempty=True)) loaded 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.9696702057785735.gz updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.9696702057785735.gz written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.9696702057785735.gz'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpj_9it8x7/0.5890494789529613.bz2', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpj_9it8x7/0.5890494789529613.bz2') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.5890494789529613.bz2 first open
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.5890494789529613.bz2 updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.5890494789529613.bz2 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.5890494789529613.bz2'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.5890494789529613.bz2 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.5890494789529613.bz2'), tag='untagged', readwrite=True, allowempty=True)) loaded 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.5890494789529613.bz2 updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.5890494789529613.bz2 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.5890494789529613.bz2'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpj_9it8x7/0.4401785237761131.lzma', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpj_9it8x7/0.4401785237761131.lzma') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.4401785237761131.lzma first open
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.4401785237761131.lzma updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.4401785237761131.lzma written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.4401785237761131.lzma'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.4401785237761131.lzma first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.4401785237761131.lzma'), tag='untagged', readwrite=True, allowempty=True)) loaded 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.4401785237761131.lzma updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.4401785237761131.lzma written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.4401785237761131.lzma'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmpj_9it8x7/0.17735032062320322.zip', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmpj_9it8x7/0.17735032062320322.zip') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.17735032062320322.zip first open
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.17735032062320322.zip updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.17735032062320322.zip written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.17735032062320322.zip'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.17735032062320322.zip first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.17735032062320322.zip'), tag='untagged', readwrite=True, allowempty=True)) loaded 2 records
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.17735032062320322.zip updated
DEBUG:dffml.JSONSource:/tmp/tmpj_9it8x7/0.17735032062320322.zip written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmpj_9it8x7/0.17735032062320322.zip'), tag='untagged', readwrite=True, allowempty=True)) saved 2 records
ok
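(Note: test_tag and test_update above exercise JSONSource save/load round-trips, including the compressed variants (.xz, .gz, .bz2, .lzma, .zip) selected by file extension. A rough sketch of that kind of round-trip, assuming the source API suggested by the loggers above (JSONSource, Record, and update()/records() on the source context); the constructor and method signatures here are assumptions, not copied from the test:

    import asyncio
    from dffml.record import Record
    from dffml.source.json import JSONSource

    async def round_trip():
        # Assumed usage: a writable JSON source that may start out empty.
        source = JSONSource(filename="data.json", readwrite=True, allowempty=True)
        async with source as src:
            async with src() as sctx:
                # Save one record, then read everything back.
                await sctx.update(Record("0", data={"features": {"x": 1}}))
                async for record in sctx.records():
                    print(record.export())

    asyncio.run(round_trip())
)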
test_config_default (tests.source.test_csv.TestCSVSource) ... ok
test_config_set (tests.source.test_csv.TestCSVSource) ... ok
test_key (tests.source.test_csv.TestCSVSource) ... DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmppvcgfyu_', tag='untagged', readwrite=False, allowempty=False, key='KeyHeader', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:/tmp/tmppvcgfyu_ first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmppvcgfyu_'), tag='untagged', readwrite=False, allowempty=False, key='KeyHeader', tagcol='tag', loadfiles=None)) loaded 2 records
ok
test_tag (tests.source.test_csv.TestCSVSource) ... DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpv6pf2u5v/0.6675926119602487', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpv6pf2u5v/0.6675926119602487', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpv6pf2u5v/0.6675926119602487') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpv6pf2u5v/0.6675926119602487 first open
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpv6pf2u5v/0.6675926119602487') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpv6pf2u5v/0.6675926119602487 already open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'face', 'feed']
DEBUG:dffml.CSVSource:/tmp/tmpv6pf2u5v/0.6675926119602487 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpv6pf2u5v/0.6675926119602487'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 1 records
ok
test_update (tests.source.test_csv.TestCSVSource) ... DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpi9826_h7/0.24976426595539536', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpi9826_h7/0.24976426595539536') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.24976426595539536 first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.24976426595539536 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.24976426595539536'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.24976426595539536 first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.24976426595539536'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.24976426595539536 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.24976426595539536'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpi9826_h7/0.12699657491979877.xz', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpi9826_h7/0.12699657491979877.xz') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.12699657491979877.xz first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.12699657491979877.xz written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.12699657491979877.xz'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.12699657491979877.xz first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.12699657491979877.xz'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.12699657491979877.xz written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.12699657491979877.xz'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpi9826_h7/0.7237867011100502.gz', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpi9826_h7/0.7237867011100502.gz') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.7237867011100502.gz first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.7237867011100502.gz written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.7237867011100502.gz'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.7237867011100502.gz first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.7237867011100502.gz'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.7237867011100502.gz written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.7237867011100502.gz'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpi9826_h7/0.34402400167931624.bz2', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpi9826_h7/0.34402400167931624.bz2') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.34402400167931624.bz2 first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.34402400167931624.bz2 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.34402400167931624.bz2'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.34402400167931624.bz2 first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.34402400167931624.bz2'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.34402400167931624.bz2 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.34402400167931624.bz2'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpi9826_h7/0.9160858973232794.lzma', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpi9826_h7/0.9160858973232794.lzma') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.9160858973232794.lzma first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.9160858973232794.lzma written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.9160858973232794.lzma'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.9160858973232794.lzma first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.9160858973232794.lzma'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.9160858973232794.lzma written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.9160858973232794.lzma'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpi9826_h7/0.30245031446966775.zip', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpi9826_h7/0.30245031446966775.zip') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.30245031446966775.zip first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.30245031446966775.zip written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.30245031446966775.zip'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.30245031446966775.zip first open
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.30245031446966775.zip'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) loaded 2 records
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'PetalLength', 'PetalWidth', 'SepalLength', 'SepalWidth', 'prediction_target_name', 'confidence_target_name']
DEBUG:dffml.CSVSource:/tmp/tmpi9826_h7/0.30245031446966775.zip written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpi9826_h7/0.30245031446966775.zip'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
ok
test_idx1 (tests.source.test_idx.TestIDXSources) ... DEBUG:dffml.IDX1Source:IDX1SourceConfig(filename='/usr/src/dffml/tests/source/train-labels-idx1-ubyte.gz', feature='label', readwrite=False, allowempty=False)
DEBUG:dffml.IDX1Source:IDX1Source(IDX1SourceConfig(filename=PosixPath('/usr/src/dffml/tests/source/train-labels-idx1-ubyte.gz'), feature='label', readwrite=False, allowempty=False)) loaded 60000 records
ok
test_idx3 (tests.source.test_idx.TestIDXSources) ... DEBUG:dffml.IDX3Source:IDX3SourceConfig(filename='/usr/src/dffml/tests/source/train-images-idx3-ubyte.gz', feature='image', readwrite=False, allowempty=False)
DEBUG:dffml.IDX3Source:IDX3Source(IDX3SourceConfig(filename=PosixPath('/usr/src/dffml/tests/source/train-images-idx3-ubyte.gz'), feature='image', readwrite=False, allowempty=False)) loaded 60000 records
ok
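(Note: the IDX tests above read the MNIST training files (train-labels-idx1-ubyte.gz, train-images-idx3-ubyte.gz) and report 60000 records each. For reference, the IDX1 label format is a big-endian magic number (2049), an item count, and one unsigned byte per label. A standalone reader in plain Python, for illustration only; this is not DFFML's IDX1Source implementation:

    import gzip
    import struct

    def read_idx1_labels(path):
        # IDX1 layout: 4-byte magic (0x00000801), 4-byte item count, then raw label bytes.
        with gzip.open(path, "rb") as f:
            magic, count = struct.unpack(">II", f.read(8))
            assert magic == 2049, "not an IDX1 label file"
            return list(f.read(count))

    # labels = read_idx1_labels("train-labels-idx1-ubyte.gz")  # -> 60000 labels
)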
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='/tmp/tmp_36cojak.db')
test_update (tests.source.test_db.TestDbSource) ... DEBUG:dffml.DbSource:DbSourceConfig(db=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmp_36cojak.db')), table_name='TestTable', model_columns=['key', 'feature_PetalLength', 'feature_PetalWidth', 'feature_SepalLength', 'feature_SepalWidth', 'target_name_confidence', 'target_name_value'])
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO TestTable ( `key`, `feature_PetalLength`, `feature_PetalWidth`, `feature_SepalLength`, `feature_SepalWidth`, `target_name_confidence`, `target_name_value` ) VALUES( ?, ?, ?, ?, ?, ?, ? )
DEBUG:dffml.SqliteDatabaseContext:SELECT * FROM TestTable WHERE ((`key` = ? ))
DEBUG:dffml.DbSourceContext:update: 0
PetalLength 3.9
PetalWidth 1.2
SepalLength 5.8
SepalWidth 2.7
Predictions
target_name
value:feedface, confidence:0.42
DEBUG:dffml.SqliteDatabaseContext:INSERT INTO TestTable ( `key`, `feature_PetalLength`, `feature_PetalWidth`, `feature_SepalLength`, `feature_SepalWidth`, `target_name_confidence`, `target_name_value` ) VALUES( ?, ?, ?, ?, ?, ?, ? )
DEBUG:dffml.SqliteDatabaseContext:SELECT * FROM TestTable WHERE ((`key` = ? ))
DEBUG:dffml.DbSourceContext:update: 1
PetalLength 3.9
PetalWidth 1.2
SepalLength 5.8
SepalWidth 2.7
Predictions
target_name
value:undetermined, confidence:1.0
DEBUG:dffml.SqliteDatabaseContext:SELECT * FROM TestTable WHERE ((`key` = ? ))
DEBUG:dffml.SqliteDatabaseContext:SELECT * FROM TestTable WHERE ((`key` = ? ))
DEBUG:dffml.SqliteDatabaseContext:SELECT * FROM TestTable
ok
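(Note: the DbSource test above drives SqliteDatabase, and the logged statements (INSERT INTO TestTable ... VALUES( ?, ... ), SELECT * FROM TestTable WHERE ((`key` = ? ))) are ordinary parameterised SQLite. The same statements can be reproduced with the standard-library sqlite3 module; the column types in the CREATE TABLE below are assumptions, since only the INSERT/SELECT statements appear in the log, and this is an illustration rather than DFFML code:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute(
        "CREATE TABLE IF NOT EXISTS TestTable "
        "(`key` text, `feature_PetalLength` real, `feature_PetalWidth` real, "
        "`feature_SepalLength` real, `feature_SepalWidth` real, "
        "`target_name_confidence` real, `target_name_value` text)"
    )
    # Same INSERT the log shows, with the values printed for record 0.
    conn.execute(
        "INSERT INTO TestTable ( `key`, `feature_PetalLength`, `feature_PetalWidth`, "
        "`feature_SepalLength`, `feature_SepalWidth`, `target_name_confidence`, "
        "`target_name_value` ) VALUES( ?, ?, ?, ?, ?, ?, ? )",
        ("0", 3.9, 1.2, 5.8, 2.7, 0.42, "feedface"),
    )
    print(conn.execute("SELECT * FROM TestTable WHERE ((`key` = ? ))", ("0",)).fetchall())
)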
test_00_args (tests.test_base.TestAutoArgsConfig) ... ok
test_config_defaults (tests.test_base.TestAutoArgsConfig) ... DEBUG:dffml.JSONSource:JSONSourceConfig(filename='file.json', tag='untagged', readwrite=False, allowempty=False)
ok
test_config_set (tests.test_base.TestAutoArgsConfig) ... DEBUG:dffml.CSVSource:CSVSourceConfig(filename='file.csv', tag='untagged', readwrite=False, allowempty=False, key='key', tagcol='tag', loadfiles=None)
ok
test_CONFIG (tests.test_base.TestCONFIG) ... DEBUG:dffml.FakeTesting3:FakeTestingConfig3(label='unlabeled')
ok
test_bump_version (tests.service.test_dev.TestBumpPackages) ... ok
test_model (tests.service.test_dev.TestDevelopCreate) ... DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting description = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting email = 'unknown@example.com'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting log = 20
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting name = 'Unknown'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting package = 'test-package'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting target = '/tmp/tmptdtgjq8n/dot'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting user = 'hashim'
copying /usr/src/dffml/dffml/skel/common/pyproject.toml -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/common/.gitignore -> /tmp/tmptdtgjq8n/dot
creating /tmp/tmptdtgjq8n/dot/tests
copying /usr/src/dffml/dffml/skel/common/tests/__init__.py -> /tmp/tmptdtgjq8n/dot/tests
copying /usr/src/dffml/dffml/skel/common/.coveragerc -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/common/LICENSE -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/common/setup_common.py -> /tmp/tmptdtgjq8n/dot
creating /tmp/tmptdtgjq8n/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/version.py -> /tmp/tmptdtgjq8n/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> /tmp/tmptdtgjq8n/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/MANIFEST.in -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/model/setup.py -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/model/pyproject.toml -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/model/.gitignore -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/model/tests/test_model.py -> /tmp/tmptdtgjq8n/dot/tests
copying /usr/src/dffml/dffml/skel/model/tests/__init__.py -> /tmp/tmptdtgjq8n/dot/tests
copying /usr/src/dffml/dffml/skel/model/.coveragerc -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/model/LICENSE -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/model/README.md -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/model/setup_common.py -> /tmp/tmptdtgjq8n/dot
copying /usr/src/dffml/dffml/skel/model/REPLACE_IMPORT_PACKAGE_NAME/version.py -> /tmp/tmptdtgjq8n/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/model/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> /tmp/tmptdtgjq8n/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/model/REPLACE_IMPORT_PACKAGE_NAME/misc.py -> /tmp/tmptdtgjq8n/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/model/MANIFEST.in -> /tmp/tmptdtgjq8n/dot
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting description = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting email = 'unknown@example.com'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting log = 20
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting name = 'Unknown'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting package = 'test-package'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting target = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting user = 'hashim'
creating test-package
copying /usr/src/dffml/dffml/skel/common/pyproject.toml -> test-package
copying /usr/src/dffml/dffml/skel/common/.gitignore -> test-package
creating test-package/tests
copying /usr/src/dffml/dffml/skel/common/tests/__init__.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/common/.coveragerc -> test-package
copying /usr/src/dffml/dffml/skel/common/LICENSE -> test-package
copying /usr/src/dffml/dffml/skel/common/setup_common.py -> test-package
creating test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/version.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/MANIFEST.in -> test-package
copying /usr/src/dffml/dffml/skel/model/setup.py -> test-package
copying /usr/src/dffml/dffml/skel/model/pyproject.toml -> test-package
copying /usr/src/dffml/dffml/skel/model/.gitignore -> test-package
copying /usr/src/dffml/dffml/skel/model/tests/test_model.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/model/tests/__init__.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/model/.coveragerc -> test-package
copying /usr/src/dffml/dffml/skel/model/LICENSE -> test-package
copying /usr/src/dffml/dffml/skel/model/README.md -> test-package
copying /usr/src/dffml/dffml/skel/model/setup_common.py -> test-package
copying /usr/src/dffml/dffml/skel/model/REPLACE_IMPORT_PACKAGE_NAME/version.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/model/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/model/REPLACE_IMPORT_PACKAGE_NAME/misc.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/model/MANIFEST.in -> test-package
ok
test_operations (tests.service.test_dev.TestDevelopCreate) ... DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting description = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting email = 'unknown@example.com'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting log = 20
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting name = 'Unknown'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting package = 'test-package'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting target = '/tmp/tmps89shgmy/dot'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting user = 'hashim'
copying /usr/src/dffml/dffml/skel/common/pyproject.toml -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/common/.gitignore -> /tmp/tmps89shgmy/dot
creating /tmp/tmps89shgmy/dot/tests
copying /usr/src/dffml/dffml/skel/common/tests/__init__.py -> /tmp/tmps89shgmy/dot/tests
copying /usr/src/dffml/dffml/skel/common/.coveragerc -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/common/LICENSE -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/common/setup_common.py -> /tmp/tmps89shgmy/dot
creating /tmp/tmps89shgmy/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/version.py -> /tmp/tmps89shgmy/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> /tmp/tmps89shgmy/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/MANIFEST.in -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/setup.py -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/pyproject.toml -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/.gitignore -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/tests/test_operations.py -> /tmp/tmps89shgmy/dot/tests
copying /usr/src/dffml/dffml/skel/operations/tests/__init__.py -> /tmp/tmps89shgmy/dot/tests
copying /usr/src/dffml/dffml/skel/operations/.coveragerc -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/LICENSE -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/Dockerfile -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/README.md -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/setup_common.py -> /tmp/tmps89shgmy/dot
copying /usr/src/dffml/dffml/skel/operations/REPLACE_IMPORT_PACKAGE_NAME/definitions.py -> /tmp/tmps89shgmy/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/operations/REPLACE_IMPORT_PACKAGE_NAME/version.py -> /tmp/tmps89shgmy/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/operations/REPLACE_IMPORT_PACKAGE_NAME/operations.py -> /tmp/tmps89shgmy/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/operations/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> /tmp/tmps89shgmy/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/operations/MANIFEST.in -> /tmp/tmps89shgmy/dot
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting description = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting email = 'unknown@example.com'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting log = 20
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting name = 'Unknown'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting package = 'test-package'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting target = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting user = 'hashim'
creating test-package
copying /usr/src/dffml/dffml/skel/common/pyproject.toml -> test-package
copying /usr/src/dffml/dffml/skel/common/.gitignore -> test-package
creating test-package/tests
copying /usr/src/dffml/dffml/skel/common/tests/__init__.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/common/.coveragerc -> test-package
copying /usr/src/dffml/dffml/skel/common/LICENSE -> test-package
copying /usr/src/dffml/dffml/skel/common/setup_common.py -> test-package
creating test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/version.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/MANIFEST.in -> test-package
copying /usr/src/dffml/dffml/skel/operations/setup.py -> test-package
copying /usr/src/dffml/dffml/skel/operations/pyproject.toml -> test-package
copying /usr/src/dffml/dffml/skel/operations/.gitignore -> test-package
copying /usr/src/dffml/dffml/skel/operations/tests/test_operations.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/operations/tests/__init__.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/operations/.coveragerc -> test-package
copying /usr/src/dffml/dffml/skel/operations/LICENSE -> test-package
copying /usr/src/dffml/dffml/skel/operations/Dockerfile -> test-package
copying /usr/src/dffml/dffml/skel/operations/README.md -> test-package
copying /usr/src/dffml/dffml/skel/operations/setup_common.py -> test-package
copying /usr/src/dffml/dffml/skel/operations/REPLACE_IMPORT_PACKAGE_NAME/definitions.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/operations/REPLACE_IMPORT_PACKAGE_NAME/version.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/operations/REPLACE_IMPORT_PACKAGE_NAME/operations.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/operations/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/operations/MANIFEST.in -> test-package
ok
test_service (tests.service.test_dev.TestDevelopCreate) ... DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting description = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting email = 'unknown@example.com'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting log = 20
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting name = 'Unknown'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting package = 'test-package'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting target = '/tmp/tmp9yt4afe7/dot'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting user = 'hashim'
copying /usr/src/dffml/dffml/skel/common/pyproject.toml -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/common/.gitignore -> /tmp/tmp9yt4afe7/dot
creating /tmp/tmp9yt4afe7/dot/tests
copying /usr/src/dffml/dffml/skel/common/tests/__init__.py -> /tmp/tmp9yt4afe7/dot/tests
copying /usr/src/dffml/dffml/skel/common/.coveragerc -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/common/LICENSE -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/common/setup_common.py -> /tmp/tmp9yt4afe7/dot
creating /tmp/tmp9yt4afe7/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/version.py -> /tmp/tmp9yt4afe7/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> /tmp/tmp9yt4afe7/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/MANIFEST.in -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/service/setup.py -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/service/pyproject.toml -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/service/.gitignore -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/service/tests/test_service.py -> /tmp/tmp9yt4afe7/dot/tests
copying /usr/src/dffml/dffml/skel/service/tests/__init__.py -> /tmp/tmp9yt4afe7/dot/tests
copying /usr/src/dffml/dffml/skel/service/.coveragerc -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/service/LICENSE -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/service/README.md -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/service/setup_common.py -> /tmp/tmp9yt4afe7/dot
copying /usr/src/dffml/dffml/skel/service/REPLACE_IMPORT_PACKAGE_NAME/version.py -> /tmp/tmp9yt4afe7/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/service/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> /tmp/tmp9yt4afe7/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/service/REPLACE_IMPORT_PACKAGE_NAME/misc.py -> /tmp/tmp9yt4afe7/dot/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/service/MANIFEST.in -> /tmp/tmp9yt4afe7/dot
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting description = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting email = 'unknown@example.com'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting log = 20
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting name = 'Unknown'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting package = 'test-package'
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting target = None
DEBUG:dffml.service.dev.create_from_skel.<locals>.CreateCMD:Setting user = 'hashim'
creating test-package
copying /usr/src/dffml/dffml/skel/common/pyproject.toml -> test-package
copying /usr/src/dffml/dffml/skel/common/.gitignore -> test-package
creating test-package/tests
copying /usr/src/dffml/dffml/skel/common/tests/__init__.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/common/.coveragerc -> test-package
copying /usr/src/dffml/dffml/skel/common/LICENSE -> test-package
copying /usr/src/dffml/dffml/skel/common/setup_common.py -> test-package
creating test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/version.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/common/MANIFEST.in -> test-package
copying /usr/src/dffml/dffml/skel/service/setup.py -> test-package
copying /usr/src/dffml/dffml/skel/service/pyproject.toml -> test-package
copying /usr/src/dffml/dffml/skel/service/.gitignore -> test-package
copying /usr/src/dffml/dffml/skel/service/tests/test_service.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/service/tests/__init__.py -> test-package/tests
copying /usr/src/dffml/dffml/skel/service/.coveragerc -> test-package
copying /usr/src/dffml/dffml/skel/service/LICENSE -> test-package
copying /usr/src/dffml/dffml/skel/service/README.md -> test-package
copying /usr/src/dffml/dffml/skel/service/setup_common.py -> test-package
copying /usr/src/dffml/dffml/skel/service/REPLACE_IMPORT_PACKAGE_NAME/version.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/service/REPLACE_IMPORT_PACKAGE_NAME/__init__.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/service/REPLACE_IMPORT_PACKAGE_NAME/misc.py -> test-package/REPLACE_IMPORT_PACKAGE_NAME
copying /usr/src/dffml/dffml/skel/service/MANIFEST.in -> test-package
ok
test_run (tests.service.test_dev.TestDevelopSkelLink) ... DEBUG:dffml.service.dev.Link:Setting log = 20
ok
test_run (tests.service.test_dev.TestExport) ... DEBUG:dffml.service.dev.Export:Setting config = <class 'dffml.configloader.json.JSONConfigLoader'>
DEBUG:dffml.service.dev.Export:Setting export = 'tests.test_df:DATAFLOW'
DEBUG:dffml.service.dev.Export:Setting log = 20
DEBUG:dffml.service.dev.Export:Setting not_linked = False
DEBUG:dffml.JSONConfigLoader:BaseConfig()
DEBUG:dffml.service.dev.Export:Loaded tests.test_df:DATAFLOW: <dffml.df.types.DataFlow object at 0x7f6cf199f890>
ok
test_already_on_pypi (tests.service.test_dev.TestRelease) ... DEBUG:dffml.service.dev.Release:Setting log = 20
DEBUG:dffml.service.dev.Release:Setting package = PosixPath('.')
DEBUG:dffml.service.dev.Release:Running: git status --porcelain .
ok
test_okay (tests.service.test_dev.TestRelease) ... DEBUG:dffml.service.dev.Release:Setting log = 20
DEBUG:dffml.service.dev.Release:Setting package = PosixPath('.')
DEBUG:dffml.service.dev.Release:Running: git status --porcelain .
ok
test_uncommited_changes (tests.service.test_dev.TestRelease) ... DEBUG:dffml.service.dev.Release:Setting log = 20
DEBUG:dffml.service.dev.Release:Setting package = PosixPath('.')
DEBUG:dffml.service.dev.Release:Running: git status --porcelain .
ok
test_run (tests.service.test_dev.TestRun) ... DEBUG:dffml.service.dev.Run:Setting log = 20
DEBUG:dffml.service.dev.Run:Setting operation = 'dffml.operation.db:db_query_create_table'
DEBUG:dffml.service.dev.Run:Loaded operation: db_query_create_table(<function db_query_create_table at 0x7f6cf19bf170>)
CRITICAL:dffml.service.dev.Run:Dict / spec'd arguments are not yet completely handled
DEBUG:dffml.MemoryInputNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationNetwork:MemoryOperationNetworkConfig(operations=[])
DEBUG:dffml.MemoryLockNetwork:BaseConfig()
DEBUG:dffml.MemoryOperationImplementationNetwork:MemoryOperationImplementationNetworkConfig(operations={})
DEBUG:dffml.MemoryKeyValueStore:BaseConfig()
DEBUG:dffml.MemoryRedundancyChecker:BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))
DEBUG:dffml.MemoryOrchestrator:MemoryOrchestratorConfig(input_network=MemoryInputNetwork(BaseConfig()), operation_network=MemoryOperationNetwork(MemoryOperationNetworkConfig(operations=[])), lock_network=MemoryLockNetwork(BaseConfig()), opimp_network=MemoryOperationImplementationNetwork(MemoryOperationImplementationNetworkConfig(operations={})), rchecker=MemoryRedundancyChecker(BaseRedundancyCheckerConfig(key_value_store=MemoryKeyValueStore(BaseConfig()))))
DEBUG:dffml.MemoryOrchestratorContext:Initializing dataflow: <dffml.df.types.DataFlow object at 0x7f6ce8d29550>
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation get_single(get_single) with base config
DEBUG:dffml.GetSingleImplementation:BaseConfig()
DEBUG:dffml.MemoryOrchestratorContext:Instantiating operation implementation db_query_create_table(db_query_create_table) with provided config {'database': {'plugin': ['sqlite'], 'config': {'filename': {'plugin': ['/tmp/tmpbh3ydxe7.db'], 'config': {}}}}}
DEBUG:dffml.SqliteDatabase:SqliteDatabaseConfig(filename='/tmp/tmpbh3ydxe7.db')
DEBUG:dffml.DbQueryCreateTableImplementation:DatabaseQueryConfig(database=SqliteDatabase(SqliteDatabaseConfig(filename='/tmp/tmpbh3ydxe7.db')))
DEBUG:dffml.MemoryOrchestratorContext:Running <dffml.df.types.DataFlow object at 0x7f6ce8d29550>: ([Input(value=[], definition=get_single_spec), Input(value=FEEDFACE, definition=query_table), Input(value={'DEADBEEF': 'text'}, definition=query_cols)],)
DEBUG:dffml.MemoryOrchestratorContext:Seeding dataflow with input_set: [Input(value=[], definition=get_single_spec), Input(value=FEEDFACE, definition=query_table), Input(value={'DEADBEEF': 'text'}, definition=query_cols)]
DEBUG:dffml.MemoryOrchestratorContext:kickstarting context: 8904055a5c767c5f87cf40469ca3cdcd4e5378fc7b7abf5c1b30e5e4b2bdfcd9
DEBUG:dffml.MemoryOperationImplementationNetworkContext:[DISPATCH] db_query_create_table
DEBUG:dffml.MemoryOrchestratorContext:[8904055a5c767c5f87cf40469ca3cdcd4e5378fc7b7abf5c1b30e5e4b2bdfcd9]: dispatch operation: db_query_create_table
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: PROCESSING: db_query_create_table
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'table_name': 'FEEDFACE', 'cols': {'DEADBEEF': 'text'}}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.SqliteDatabaseContext:CREATE TABLE IF NOT EXISTS FEEDFACE (`DEADBEEF` text)
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: None
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Stage: OUTPUT: get_single
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Inputs: {'spec': []}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Conditions: {}
DEBUG:dffml.GetSingle:output spec: []
DEBUG:dffml.MemoryOperationImplementationNetworkContext:Outputs: {}
DEBUG:dffml.MemoryOperationImplementationNetworkContext:---
ok
test_open_close_all (tests.util.test_asynccontextmanager.TestAsyncContextManagerList) ... DEBUG:dffml.util.AsyncContextManagerList.AsyncContextManagerList:Entering: <tests.util.test_asynccontextmanager.OpenCloseTester object at 0x7f6ce8d29c10>
DEBUG:dffml.util.AsyncContextManagerList.AsyncContextManagerList:Entering: <tests.util.test_asynccontextmanager.OpenCloseTester object at 0x7f6cf16a8fd0>
ok
test_mktempdir (tests.util.test_tempdir.TestTempDir) ... DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_d5moobyu'
ok
test_rmtempdirs (tests.util.test_tempdir.TestTempDir) ... DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_mcoqk7ax'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_mcoqk7ax'
ok
test_removes_on_aexit (tests.util.test_tempdir.TestTempDirAsyncContextManager) ... DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_v8jzlywb'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_o9fi9zxk'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_xa_7fhbi'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_csbsyaik'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_iefx00e4'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_ecx2gben'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_lhjvh45u'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_jkru1yya'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_aqdyatmc'
DEBUG:dffml.util.tempdir:Created directory '/tmp/dffml_r4ye4i76'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_v8jzlywb'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_o9fi9zxk'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_xa_7fhbi'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_csbsyaik'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_iefx00e4'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_ecx2gben'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_lhjvh45u'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_jkru1yya'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_aqdyatmc'
DEBUG:dffml.util.tempdir:Removing directory '/tmp/dffml_r4ye4i76'
ok
test_cached_download (tests.util.test_net.TestNet) ... ERROR
test_cached_download_unpack_archive (tests.util.test_net.TestNet) ... ERROR
test_aenter_all (tests.util.test_asynchelper.TestAsyncContextManagerList) ... DEBUG:dffml.util.AsyncContextManagerList.AsyncContextManagerList:Entering: <contextlib._AsyncGeneratorContextManager object at 0x7f6cf121b550>
DEBUG:dffml.util.AsyncContextManagerList.AsyncContextManagerList:Entering: <contextlib._AsyncGeneratorContextManager object at 0x7f6cf12af210>
ok
test_cancel_not_done (tests.util.test_asynchelper.TestConcurrently) ... DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._cancel_later() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:98> result=None>] done
ok
test_ignore_errors (tests.util.test_asynchelper.TestConcurrently) ... DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> result=17>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> result=13>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> result=9>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> result=25>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> result=21>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] Ignoring exception:
ok
test_more_tasks_on_the_fly (tests.util.test_asynchelper.TestConcurrently) ... DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=21>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=25>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=19>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=13>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=7>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=15>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=9>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=11>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=23>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=17>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=27>] done
ok
test_no_cancel (tests.util.test_asynchelper.TestConcurrently) ... DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=25>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=21>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=7>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=15>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=11>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=13>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=9>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=19>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=23>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=17>] done
ok
test_no_errors (tests.util.test_asynchelper.TestConcurrently) ... DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=25>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=23>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=17>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=15>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=11>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=13>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=9>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=21>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=7>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:31> result=19>] done
ok
test_raise_errors (tests.util.test_asynchelper.TestConcurrently) ... DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> result=25>] done
DEBUG:dffml.util.concurrently:[<Task finished coro=<TestConcurrently._test_bad_method() done, defined at /usr/src/dffml/tests/util/test_asynchelper.py:34> exception=ValueError()>] done
ok
test_load_given_name (tests.util.test_entrypoint.TestEntrypoint) ... ok
test_load_multiple (tests.util.test_entrypoint.TestEntrypoint) ... ok
test_load_no_found (tests.util.test_entrypoint.TestEntrypoint) ... ok
test_numpy_docstring_args (tests.util.test_config.TestMakeConfig) ... ok
test_init (tests.util.test_cli.TestArg) ... ok
test_modify (tests.util.test_cli.TestArg) ... ok
test_parse_unknown (tests.util.test_cli.TestArg) ... ok
test_async_context_management (tests.util.test_cli.TestCMD) ... DEBUG:dffml.util.cli.cmd.CMD:Setting log = 20
ok
test_cli_no_sub_command (tests.util.test_cli.TestCMD) ... ok
test_cli_run_single (tests.util.test_cli.TestCMD) ... DEBUG:tests.util.test_cli.TestCMD.test_cli_run_single.<locals>.Primary:Setting log = 20
ok
test_cli_run_sub_command (tests.util.test_cli.TestCMD) ... DEBUG:tests.util.test_cli.TestCMD.test_cli_run_sub_command.<locals>.Secondary:Setting log = 20
ok
test_cli_run_sub_command_asyncgen (tests.util.test_cli.TestCMD) ... DEBUG:tests.util.test_cli.TestCMD.test_cli_run_sub_command_asyncgen.<locals>.Secondary:Setting log = 20
ok
test_cli_sub_command_without_run (tests.util.test_cli.TestCMD) ... ok
test_init (tests.util.test_cli.TestCMD) ... DEBUG:tests.util.test_cli.TestCMD.test_init.<locals>.CMDTest:Ignored ignored
DEBUG:tests.util.test_cli.TestCMD.test_init.<locals>.CMDTest:Setting log = 20
DEBUG:tests.util.test_cli.TestCMD.test_init.<locals>.CMDTest:Setting nope = True
ok
test_main_result_none (tests.util.test_cli.TestCMD) ... DEBUG:asyncio:Using selector: EpollSelector
DEBUG:tests.util.test_cli.TestCMD.test_main_result_none.<locals>.Secondary:Setting log = 20
ok
test_main_result_not_none (tests.util.test_cli.TestCMD) ... DEBUG:asyncio:Using selector: EpollSelector
DEBUG:tests.util.test_cli.TestCMD.test_main_result_not_none.<locals>.Secondary:Setting log = 20
ok
test_parse_args (tests.util.test_cli.TestCMD) ... ok
test_sanitize_args (tests.util.test_cli.TestCMD) ... DEBUG:dffml.util.cli.cmd.CMD:Setting log = 20
ok
test_default (tests.util.test_cli.TestJSONEncoder) ... ok
test_feature (tests.util.test_cli.TestJSONEncoder) ... ok
test_record (tests.util.test_cli.TestJSONEncoder) ... ok
test_display_docstring (tests.util.test_cli.TestListEntrypoint) ... DEBUG:dffml.util.cli.cmds.ListEntrypoint:Setting log = 20
ok
test_display_no_docstring (tests.util.test_cli.TestListEntrypoint) ... DEBUG:dffml.util.cli.cmds.ListEntrypoint:Setting log = 20
ok
test_run (tests.util.test_cli.TestListEntrypoint) ... DEBUG:tests.util.test_cli.TestListEntrypoint.test_run.<locals>.FakeListEntrypoint:Setting log = 20
ok
test_inputs (tests.util.test_cli.TestParseActions) ... ok
test_list_action (tests.util.test_cli.TestParseActions) ... ok
test_logging (tests.util.test_cli.TestParseActions) ... ok
test_add_subs (tests.util.test_cli.TestParser) ... ok
test_prepend_to_path (tests.util.test_os.TestOS) ... ok
test_all_common_files_accounted_for (tests.util.test_skel.TestSkelUtil) ... ok
test_convert_dtype (tests.test_feature.TestFeature) ... ok
test_convert_dtype_invalid (tests.test_feature.TestFeature) ... ok
test_default_dtype (tests.test_feature.TestFeature) ... ok
test_default_length (tests.test_feature.TestFeature) ... ok
test_load_def (tests.test_feature.TestFeature) ... ok
test_names (tests.test_feature.TestFeatures) ... ok
test_string_keys (tests.integration.test_sources.TestCSV) ... skipped 'Required plugins: dffml-model-scikit must be installed in development mode'
test_export (tests.integration.test_service_dev.TestDevelop) ... skipped 'Required plugins: shouldi must be installed in development mode'
test_run (tests.integration.test_service_dev.TestDevelop) ... skipped 'Required plugins: dffml-model-scratch must be installed in development mode'
test_records (tests.integration.test_cli.TestList) ... DEBUG:dffml.cli.list.ListRecords:Setting log = 20
DEBUG:dffml.cli.list.ListRecords:Setting sources = [<class 'dffml.base.MemorySource'>]
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 'A', 'extra': {}}, {'key': 'B', 'extra': {}}, {'key': 'C', 'extra': {}}])
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: MemorySource(MemorySourceConfig(records=[{'key': 'A', 'extra': {}}, {'key': 'B', 'extra': {}}, {'key': 'C', 'extra': {}}]))
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf11d3e90>
ok
test_memory_to_csv (tests.integration.test_cli.TestMerge) ... DEBUG:dffml.cli.cli.Merge:Setting dest = <class 'dffml.base.CSVSource'>
DEBUG:dffml.cli.cli.Merge:Setting log = 20
DEBUG:dffml.cli.cli.Merge:Setting src = <class 'dffml.base.MemorySource'>
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 'A', 'extra': {}}, {'key': 'B', 'extra': {}}, {'key': 'C', 'extra': {}}])
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmp_yzgi25j/0.4799478543888236', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmp_yzgi25j/0.4799478543888236') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmp_yzgi25j/0.4799478543888236 first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag']
DEBUG:dffml.CSVSource:/tmp/tmp_yzgi25j/0.4799478543888236 written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmp_yzgi25j/0.4799478543888236'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 3 records
ok
test_memory_to_json (tests.integration.test_cli.TestMerge) ... DEBUG:dffml.cli.cli.Merge:Setting dest = <class 'dffml.base.JSONSource'>
DEBUG:dffml.cli.cli.Merge:Setting log = 20
DEBUG:dffml.cli.cli.Merge:Setting src = <class 'dffml.base.MemorySource'>
DEBUG:dffml.MemorySource:MemorySourceConfig(records=[{'key': 'A', 'extra': {}}, {'key': 'B', 'extra': {}}, {'key': 'C', 'extra': {}}])
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmprvyagnv0/0.7897961320241574', tag='untagged', readwrite=True, allowempty=True)
DEBUG:dffml.JSONSource:PosixPath('/tmp/tmprvyagnv0/0.7897961320241574') is not a file, initializing memory to empty dict
DEBUG:dffml.JSONSource:/tmp/tmprvyagnv0/0.7897961320241574 first open
DEBUG:dffml.JSONSource:/tmp/tmprvyagnv0/0.7897961320241574 updated
DEBUG:dffml.JSONSource:/tmp/tmprvyagnv0/0.7897961320241574 written
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmprvyagnv0/0.7897961320241574'), tag='untagged', readwrite=True, allowempty=True)) saved 3 records
DEBUG:dffml.cli.list.ListRecords:Setting log = 20
DEBUG:dffml.cli.list.ListRecords:Setting sources = [<class 'dffml.base.JSONSource'>]
DEBUG:dffml.JSONSource:JSONSourceConfig(filename='/tmp/tmprvyagnv0/0.7897961320241574', tag='untagged', readwrite=False, allowempty=False)
DEBUG:dffml.util.AsyncContextManagerList.Sources:Entering: JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmprvyagnv0/0.7897961320241574'), tag='untagged', readwrite=False, allowempty=False))
DEBUG:dffml.JSONSource:/tmp/tmprvyagnv0/0.7897961320241574 first open
DEBUG:dffml.source.json:JSONSource(JSONSourceConfig(filename=PosixPath('/tmp/tmprvyagnv0/0.7897961320241574'), tag='untagged', readwrite=False, allowempty=False)) loaded 3 records
DEBUG:dffml.util.AsyncContextManagerListContext.SourcesContext:Entering context: <dffml.source.memory.MemorySourceContext object at 0x7f6cf127de10>
ok
test_dataflow_run_cli_example (tests.integration.test_dataflow.TestDataFlowCreate) ... skipped 'Required plugins: shouldi, dffml-config-yaml, dffml-feature-git must be installed in development mode'
test_default (tests.integration.test_dataflow.TestDataFlowDiagram) ... skipped 'Required plugins: shouldi, dffml-config-yaml, dffml-feature-git must be installed in development mode'
test_multi_stage (tests.integration.test_dataflow.TestDataFlowDiagram) ... skipped 'Required plugins: shouldi, dffml-config-yaml, dffml-feature-git must be installed in development mode'
test_simple (tests.integration.test_dataflow.TestDataFlowDiagram) ... skipped 'Required plugins: shouldi, dffml-config-yaml, dffml-feature-git must be installed in development mode'
test_single_stage (tests.integration.test_dataflow.TestDataFlowDiagram) ... skipped 'Required plugins: shouldi, dffml-config-yaml, dffml-feature-git must be installed in development mode'
test_dataflow_usage_example (tests.integration.test_dataflow.TestDataFlowMerge) ... skipped 'Required plugins: shouldi, dffml-config-yaml, dffml-feature-git must be installed in development mode'
test_run (tests.integration.test_dataflow.TestDataFlowMerge) ... skipped 'Required plugins: shouldi, dffml-config-yaml, dffml-feature-git must be installed in development mode'
test_predict (tests.test_noasync.TestML) ... DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmp3m9vilss/0.9450895906116866.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmp3m9vilss/0.9450895906116866.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmp3m9vilss/0.9450895906116866.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Salary', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmp3m9vilss/0.9450895906116866.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmp3m9vilss/0.9450895906116866.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 4 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpy2l_7m_4/0.01978748700949351.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpy2l_7m_4/0.01978748700949351.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpy2l_7m_4/0.01978748700949351.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Salary', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmpy2l_7m_4/0.01978748700949351.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpy2l_7m_4/0.01978748700949351.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
DEBUG:dffml.CSVSource:CSVSourceConfig(filename='/tmp/tmpai6x0w6z/0.45082104001267487.csv', tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)
DEBUG:dffml.CSVSource:PosixPath('/tmp/tmpai6x0w6z/0.45082104001267487.csv') is not a file, initializing memory to empty dict
DEBUG:dffml.CSVSource:/tmp/tmpai6x0w6z/0.45082104001267487.csv first open
DEBUG:dffml.CSVSource:fieldnames: ['key', 'tag', 'Expertise', 'Trust', 'Years']
DEBUG:dffml.CSVSource:/tmp/tmpai6x0w6z/0.45082104001267487.csv written
DEBUG:dffml.CSVSource:CSVSource(CSVSourceConfig(filename=PosixPath('/tmp/tmpai6x0w6z/0.45082104001267487.csv'), tag='untagged', readwrite=True, allowempty=True, key='key', tagcol='tag', loadfiles=None)) saved 2 records
skipped 'Required plugins: dffml-model-scikit must be installed in development mode'
test_dict (tests.test_record.TestRecord) ... ok
test_evaluated (tests.test_record.TestRecord) ... INFO:dffml.record:Evaluated full {'dead': 'beef', 'feed': 'face'}
INFO:dffml.record:Evaluated full {'new': 'feature'}
ok
test_features (tests.test_record.TestRecord) ... ok
test_key (tests.test_record.TestRecord) ... ok
test_merge (tests.test_record.TestRecord) ... ok
test_predicted (tests.test_record.TestRecord) ... ok
test_prediction (tests.test_record.TestRecord) ... ok
test_repr (tests.test_record.TestRecord) ... ok
test_str (tests.test_record.TestRecord) ... ok
test_null_dict_no_prediction (tests.test_record.TestRecordData) ... ok
test_full_bool_true (tests.test_record.TestRecordPrediction) ... ok
test_full_dict_returns_self (tests.test_record.TestRecordPrediction) ... ok
test_full_len_2 (tests.test_record.TestRecordPrediction) ... ok
test_full_property_confidence (tests.test_record.TestRecordPrediction) ... ok
test_full_property_value (tests.test_record.TestRecordPrediction) ... ok
test_null_bool_false (tests.test_record.TestRecordPrediction) ... ok
test_null_dict_empty_array (tests.test_record.TestRecordPrediction) ... ok
test_null_len_0 (tests.test_record.TestRecordPrediction) ... ok
======================================================================
ERROR: test_docstring (tests.test_docstrings.util_net_cached_download)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/src/dffml/tests/test_docstrings.py", line 207, in testcase
run_doctest(obj, state)
File "/usr/src/dffml/tests/test_docstrings.py", line 176, in run_doctest
raise Exception(output.getvalue())
Exception: Trying:
import asyncio
Expecting nothing
ok
Trying:
from dffml import cached_download
Expecting nothing
ok
Trying:
@cached_download(
"https://github.com/intel/dffml/raw/152c2b92535fac6beec419236f8639b0d75d707d/MANIFEST.in",
"MANIFEST.in",
"f7aadf5cdcf39f161a779b4fa77ec56a49630cf7680e21fb3dc6c36ce2d8c6fae0d03d5d3094a6aec4fea1561393c14c",
)
async def first_line_in_manifest_152c2b(manifest):
return manifest.read_text().split()[:2]
Expecting nothing
ok
Trying:
asyncio.run(first_line_in_manifest_152c2b())
Expecting:
['include', 'README.md']
**********************************************************************
File "/usr/src/dffml/dffml/util/net.py", line 108, in cached_download
Failed example:
asyncio.run(first_line_in_manifest_152c2b())
Exception raised:
Traceback (most recent call last):
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/doctest.py", line 1329, in __run
compileflags, 1), test.globs)
File "<doctest cached_download[3]>", line 1, in <module>
asyncio.run(first_line_in_manifest_152c2b())
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/asyncio/runners.py", line 43, in run
return loop.run_until_complete(main)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/asyncio/base_events.py", line 583, in run_until_complete
return future.result()
File "/usr/src/dffml/dffml/util/net.py", line 130, in wrapper
url, protocol_allowlist=protocol_allowlist
File "/usr/src/dffml/dffml/util/net.py", line 57, in sync_urlopen
return urllib.request.urlopen(url)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 222, in urlopen
return opener.open(url, data, timeout)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 525, in open
response = self._open(req, data)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 543, in _open
'_open', req)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 503, in _call_chain
result = func(*args)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 1362, in https_open
context=self._context, check_hostname=self._check_hostname)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 1319, in do_open
encode_chunked=req.has_header('Transfer-encoding'))
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1252, in request
self._send_request(method, url, body, headers, encode_chunked)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1298, in _send_request
self.endheaders(body, encode_chunked=encode_chunked)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1247, in endheaders
self._send_output(message_body, encode_chunked=encode_chunked)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1026, in _send_output
self.send(msg)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 966, in send
self.connect()
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1414, in connect
super().connect()
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 938, in connect
(self.host,self.port), self.timeout, self.source_address)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/socket.py", line 707, in create_connection
for res in getaddrinfo(host, port, 0, SOCK_STREAM):
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/socket.py", line 752, in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
LookupError: unknown encoding: idna
======================================================================
ERROR: test_docstring (tests.test_docstrings.util_net_cached_download_unpack_archive)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/src/dffml/tests/test_docstrings.py", line 207, in testcase
run_doctest(obj, state)
File "/usr/src/dffml/tests/test_docstrings.py", line 176, in run_doctest
raise Exception(output.getvalue())
Exception: Trying:
import asyncio
Expecting nothing
ok
Trying:
from dffml import cached_download_unpack_archive
Expecting nothing
ok
Trying:
@cached_download_unpack_archive(
"https://github.com/intel/dffml/archive/152c2b92535fac6beec419236f8639b0d75d707d.tar.gz",
"dffml.tar.gz",
"dffml",
"32ba082cd8056ff4ddcb68691a590c3cb8fea2ff75c0265b8d844c5edc7eaef54136160c6090750e562059b957355b15",
)
async def files_in_dffml_commit_152c2b(dffml_dir):
return len(list(dffml_dir.rglob("**/*")))
Expecting nothing
ok
Trying:
asyncio.run(files_in_dffml_commit_152c2b())
Expecting:
594
**********************************************************************
File "/usr/src/dffml/dffml/util/net.py", line 195, in cached_download_unpack_archive
Failed example:
asyncio.run(files_in_dffml_commit_152c2b())
Exception raised:
Traceback (most recent call last):
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/doctest.py", line 1329, in __run
compileflags, 1), test.globs)
File "<doctest cached_download_unpack_archive[3]>", line 1, in <module>
asyncio.run(files_in_dffml_commit_152c2b())
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/asyncio/runners.py", line 43, in run
return loop.run_until_complete(main)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/asyncio/base_events.py", line 583, in run_until_complete
return future.result()
File "/usr/src/dffml/dffml/util/net.py", line 214, in wrapper
await extract()
File "/usr/src/dffml/dffml/util/net.py", line 130, in wrapper
url, protocol_allowlist=protocol_allowlist
File "/usr/src/dffml/dffml/util/net.py", line 57, in sync_urlopen
return urllib.request.urlopen(url)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 222, in urlopen
return opener.open(url, data, timeout)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 525, in open
response = self._open(req, data)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 543, in _open
'_open', req)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 503, in _call_chain
result = func(*args)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 1362, in https_open
context=self._context, check_hostname=self._check_hostname)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/urllib/request.py", line 1319, in do_open
encode_chunked=req.has_header('Transfer-encoding'))
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1252, in request
self._send_request(method, url, body, headers, encode_chunked)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1298, in _send_request
self.endheaders(body, encode_chunked=encode_chunked)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1247, in endheaders
self._send_output(message_body, encode_chunked=encode_chunked)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1026, in _send_output
self.send(msg)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 966, in send
self.connect()
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 1414, in connect
super().connect()
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/client.py", line 938, in connect
(self.host,self.port), self.timeout, self.source_address)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/socket.py", line 707, in create_connection
for res in getaddrinfo(host, port, 0, SOCK_STREAM):
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/socket.py", line 752, in getaddrinfo
for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
LookupError: unknown encoding: idna
======================================================================
ERROR: test_cached_download (tests.util.test_net.TestNet)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/src/dffml/dffml/util/asynctestcase.py", line 68, in run_it
result = self.loop.run_until_complete(coro(*args, **kwargs))
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/asyncio/base_events.py", line 583, in run_until_complete
return future.result()
File "/usr/src/dffml/.eggs/httptest-0.0.15-py3.7.egg/httptest/httptest.py", line 301, in wrap
server = HTTPServer(self._addr, self._class)
File "/usr/src/dffml/.eggs/httptest-0.0.15-py3.7.egg/httptest/httptest.py", line 211, in __init__
super().__init__(*args, **kwargs)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/socketserver.py", line 452, in __init__
self.server_bind()
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/server.py", line 139, in server_bind
self.server_name = socket.getfqdn(host)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/socket.py", line 676, in getfqdn
hostname, aliases, ipaddrs = gethostbyaddr(name)
LookupError: unknown encoding: idna
======================================================================
ERROR: test_cached_download_unpack_archive (tests.util.test_net.TestNet)
----------------------------------------------------------------------
Traceback (most recent call last):
File "/usr/src/dffml/dffml/util/asynctestcase.py", line 68, in run_it
result = self.loop.run_until_complete(coro(*args, **kwargs))
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/asyncio/base_events.py", line 583, in run_until_complete
return future.result()
File "/usr/src/dffml/.eggs/httptest-0.0.15-py3.7.egg/httptest/httptest.py", line 301, in wrap
server = HTTPServer(self._addr, self._class)
File "/usr/src/dffml/.eggs/httptest-0.0.15-py3.7.egg/httptest/httptest.py", line 211, in __init__
super().__init__(*args, **kwargs)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/socketserver.py", line 452, in __init__
self.server_bind()
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/http/server.py", line 139, in server_bind
self.server_name = socket.getfqdn(host)
File "/home/hashim/.cache/pip/minicondapy37/lib/python3.7/socket.py", line 676, in getfqdn
hostname, aliases, ipaddrs = gethostbyaddr(name)
LookupError: unknown encoding: idna
----------------------------------------------------------------------
Ran 246 tests in 6.411s
FAILED (errors=4, skipped=12)
Test failed: <unittest.runner.TextTestResult run=246 errors=4 failures=0>
error: Test failed: <unittest.runner.TextTestResult run=246 errors=4 failures=0>
+ cleanup_temp_dirs
+ '[' x '!=' x ']'
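
All four errors above trace back to the same exception, LookupError: unknown encoding: idna, raised the first time the standard library tries to resolve a hostname (urllib's urlopen in the two doctests, HTTPServer's getfqdn in the two httptest-based tests). The lines below are only a sketch of how that could be confirmed and worked around inside this container; they assume the root cause is the lazily imported idna codec failing to load, which the log itself does not prove.

# Sketch only (assumption): pre-import the "idna" codec so the lazy codec
# lookup inside socket.getaddrinfo()/getfqdn() cannot fail later.
import encodings.idna  # noqa: F401

import socket

# The failing tests reach these calls via urllib and HTTPServer; with the
# codec already imported they should no longer raise
# "LookupError: unknown encoding: idna".
print(socket.getfqdn("127.0.0.1"))
print(socket.getaddrinfo("localhost", 80)[0][:2])

If the import on the first line fails on its own, the problem would be the environment (for example the conda install under /home/hashim/.cache/pip/minicondapy37) rather than the dffml tests themselves.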