Last active
April 14, 2020 17:39
-
-
Save mhash1m/146dbf75a78aac9951bf14a10b2367dc to your computer and use it in GitHub Desktop.
Code example: running database queries with the operations in `operation/db`
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Create a throw-away SQLite database file for this example; we only
# need the path, so close the file descriptor mkstemp hands back.
db_fd, db_path = tempfile.mkstemp(prefix="example", suffix=".db")
os.close(db_fd)
sdb = SqliteDatabase(SqliteDatabaseConfig(filename=db_path))

# Wire the four db_query_* operations, plus GetSingle for collecting
# outputs, into one dataflow.  Every query operation shares the same
# database config, so build those entries with a comprehension.
dataflow = DataFlow(
    operations={
        "db_query_create": db_query_create_table.op,
        "db_query_insert": db_query_insert.op,
        "db_query_update": db_query_update.op,
        "db_query_lookup": db_query_lookup.op,
        "get_single": GetSingle.imp.op,
    },
    configs={
        instance_name: DatabaseQueryConfig(database=sdb)
        for instance_name in (
            "db_query_create",
            "db_query_update",
            "db_query_insert",
            "db_query_lookup",
        )
    },
    # Seed every context with a GetSingle spec asking for the lookup
    # operation's results (equivalent to appending to an empty seed).
    seed=[
        Input(
            value=[db_query_lookup.op.outputs["lookups"].name],
            definition=GetSingle.op.inputs["spec"],
        )
    ],
    implementations={
        db_query_create_table.op.name: db_query_create_table.imp,
        db_query_lookup.op.name: db_query_lookup.imp,
        db_query_insert.op.name: db_query_insert.imp,
        db_query_update.op.name: db_query_update.imp,
    },
)
##### INPUTS1 ########################################################################### | |
# Input set 1: a flat list runs everything in a single dataflow
# context -- create the table, insert a row, update, then look up.
TABLE = "myTable"
inputs = [
    # -- CREATE TABLE ---------------------------------------------------
    Input(value=TABLE, definition=db_query_create_table.op.inputs["table_name"]),
    Input(
        value={
            "key": "real",
            "firstName": "text",
            "lastName": "text",
            "age": "real",
        },
        definition=db_query_create_table.op.inputs["cols"],
    ),
    # -- INSERT ---------------------------------------------------------
    Input(value=TABLE, definition=db_query_insert.op.inputs["table_name"]),
    Input(
        value={"key": 10, "firstName": "John", "lastName": "Doe", "age": 16},
        definition=db_query_insert.op.inputs["data"],
    ),
    # -- UPDATE (empty conditions list) ---------------------------------
    Input(value=TABLE, definition=db_query_update.op.inputs["table_name"]),
    Input(
        value={"key": 11, "firstName": "John", "lastName": "Miles", "age": 37},
        definition=db_query_update.op.inputs["data"],
    ),
    Input(value=[], definition=db_query_update.op.inputs["conditions"]),
    # -- LOOKUP (all columns, empty conditions) -------------------------
    Input(value=TABLE, definition=db_query_lookup.op.inputs["table_name"]),
    Input(
        value={
            "key": "real",
            "firstName": "text",
            "lastName": "text",
            "age": "real",
        },
        definition=db_query_lookup.op.inputs["cols"],
    ),
    Input(value=[], definition=db_query_lookup.op.inputs["conditions"]),
]
async def main():
    """Run the dataflow over ``inputs`` and print each context's results."""
    async for _ctx, results in MemoryOrchestrator.run(dataflow, inputs):
        print(results)


asyncio.run(main())
####OUTPUT | |
````````````````````````````````````` | |
{'query_lookups': [{'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}, {'key': 10.0, 'firstName': 'John', 'lastName': 'Doe', 'age': 16.0}]} | |
````````````````````````````````````` | |
##### INPUTS2 ########################################################################### | |
# Input set 2: a dict maps one context name to one list of Inputs, so
# each key runs as its own dataflow context.
#
# BUG FIX: the original dict literal used the key "insert" three times.
# Python silently keeps only the LAST value for a duplicated dict key,
# so two of the three inserts were discarded before the dataflow ever
# ran.  Context keys must be unique -- name each insert distinctly.
inputs = {
    "create": [
        Input(
            value="myTable",
            definition=db_query_create_table.op.inputs["table_name"],
        ),
        Input(
            value={
                "key": "real",
                "firstName": "text",
                "lastName": "text",
                "age": "real",
            },
            definition=db_query_create_table.op.inputs["cols"],
        ),
    ],
    "insert_john_doe": [
        Input(
            value="myTable",
            definition=db_query_insert.op.inputs["table_name"],
        ),
        Input(
            value={"key": 10, "firstName": "John", "lastName": "Doe", "age": 16},
            definition=db_query_insert.op.inputs["data"],
        ),
    ],
    "insert_john_miles": [
        Input(
            value="myTable",
            definition=db_query_insert.op.inputs["table_name"],
        ),
        Input(
            value={"key": 11, "firstName": "John", "lastName": "Miles", "age": 37},
            definition=db_query_insert.op.inputs["data"],
        ),
    ],
    "insert_bill_miles": [
        Input(
            value="myTable",
            definition=db_query_insert.op.inputs["table_name"],
        ),
        Input(
            value={"key": 12, "firstName": "Bill", "lastName": "Miles", "age": 40},
            definition=db_query_insert.op.inputs["data"],
        ),
    ],
    "update": [
        Input(
            value="myTable",
            definition=db_query_update.op.inputs["table_name"],
        ),
        Input(
            value={"key": 11, "firstName": "John", "lastName": "Miles", "age": 37},
            definition=db_query_update.op.inputs["data"],
        ),
        # Input set 1 supplies an (empty) conditions list alongside the
        # update data; this context was missing it, so match that usage.
        Input(value=[], definition=db_query_update.op.inputs["conditions"]),
    ],
    "lookup": [
        Input(
            value="myTable",
            definition=db_query_lookup.op.inputs["table_name"],
        ),
        Input(
            value={
                "key": "real",
                "firstName": "text",
                "lastName": "text",
                "age": "real",
            },
            definition=db_query_lookup.op.inputs["cols"],
        ),
        Input(value=[], definition=db_query_lookup.op.inputs["conditions"]),
    ],
}
async def main():
    """Execute the dataflow; one result dict is printed per context."""
    run_iterator = MemoryOrchestrator.run(dataflow, inputs)
    async for _, per_ctx_results in run_iterator:
        print(per_ctx_results)


asyncio.run(main())
#######OUTPUT | |
`````````````````````````````` | |
{'query_lookups': [{'key': 12.0, 'firstName': 'Bill', 'lastName': 'Miles', 'age': 40.0}, {'key': 11.0, 'firstName': 'John', 'lastName': 'Miles', 'age': 37.0}]} | |
Traceback (most recent call last): | |
File "Example.py", line 346, in <module> | |
asyncio.run(main()) | |
File "/usr/local/lib/python3.7/asyncio/runners.py", line 43, in run | |
return loop.run_until_complete(main) | |
File "/usr/local/lib/python3.7/asyncio/base_events.py", line 583, in run_until_complete | |
return future.result() | |
File "Example.py", line 342, in main | |
async for ctx, result in MemoryOrchestrator.run(dataflow, inputs): | |
File "/usr/src/dffml/dffml/df/base.py", line 817, in run | |
async for ctx, results in octx.run(inputs): | |
File "/usr/src/dffml/dffml/df/memory.py", line 1353, in run | |
raise exception | |
File "/usr/src/dffml/dffml/df/memory.py", line 1567, in run_operations_for_ctx | |
ctx, Stage.OUTPUT | |
File "/usr/src/dffml/dffml/df/memory.py", line 1564, in <dictcomp> | |
output = { | |
File "/usr/src/dffml/dffml/df/memory.py", line 1592, in run_stage | |
ctx, self, operation, await parameter_set._asdict() | |
File "/usr/src/dffml/dffml/df/memory.py", line 936, in run | |
outputs = await opctx.run(inputs) | |
File "/usr/src/dffml/dffml/operation/output.py", line 245, in run | |
want = await super().run(inputs) | |
File "/usr/src/dffml/dffml/operation/output.py", line 172, in run | |
self.ctx, exported[convert] | |
File "/usr/src/dffml/dffml/df/memory.py", line 397, in definition | |
"%s: %s" % (handle_string, definition) | |
dffml.df.exceptions.DefinitionNotInContext: insert: query_lookups | |
``````````````````````````````` |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment