Choose environment per workspace.
- server/llm_ops.py +10 -8
- server/lynxkite_ops.py +52 -9
- server/lynxscribe_ops.py +2 -1
- server/main.py +4 -2
- server/networkx_ops.py +24 -21
- server/ops.py +21 -9
- server/pytorch_model_ops.py +4 -2
- server/workspace.py +6 -39
- web/src/LynxKiteFlow.svelte +76 -24
- web/src/Workspace.svelte +3 -52
server/llm_ops.py  CHANGED
@@ -7,6 +7,8 @@ import pandas as pd
 client = openai.OpenAI(base_url="http://localhost:11434/v1")
 CACHE = {}
 
+op = ops.op_registration('LLM logic')
+
 def chat(*args, **kwargs):
     key = json.dumps({'args': args, 'kwargs': kwargs})
     if key not in CACHE:
@@ -14,11 +16,11 @@ def chat(*args, **kwargs):
         CACHE[key] = [c.message.content for c in completion.choices]
     return CACHE[key]
 
-@
+@op("Input")
 def input(*, filename: ops.PathStr, key: str):
     return pd.read_csv(filename).rename(columns={key: 'text'})
 
-@
+@op("Create prompt")
 def create_prompt(input, *, template: ops.LongStr):
     assert template, 'Please specify the template. Refer to columns using their names in uppercase.'
     df = input.copy()
@@ -32,15 +34,15 @@ def create_prompt(input, *, template: ops.LongStr):
     return df
 
 
-@
-def ask_llm(input, *, model: str, choices: list = None, max_tokens: int = 100):
+@op("Ask LLM")
+def ask_llm(input, *, model: str, accepted_regex: str = None, max_tokens: int = 100):
     assert model, 'Please specify the model.'
     assert 'prompt' in input.columns, 'Please create the prompt first.'
     df = input.copy()
     g = {}
-    if
+    if accepted_regex:
         g['extra_body'] = {
-            "
+            "guided_regex": accepted_regex,
         }
     for i, row in df.iterrows():
         [res] = chat(
@@ -54,7 +56,7 @@ def ask_llm(input, *, model: str, choices: list = None, max_tokens: int = 100):
         df.loc[i, 'response'] = res
     return df
 
-@
+@op("View", view="table_view")
 def view(input):
     v = {
         'dataframes': { 'df': {
@@ -66,7 +68,7 @@ def view(input):
 
 @ops.input_position(input="right")
 @ops.output_position(output="left")
-@
+@op("Loop")
 def loop(input, *, max_iterations: int = 10):
     '''Data can flow back here until it becomes empty or reaches the limit.'''
     return input
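The `choices` parameter of `ask_llm` is replaced by `accepted_regex`, which is forwarded to the backend as a `guided_regex` constraint in `extra_body`. A minimal usage sketch of the new signature, assuming the OpenAI-compatible server behind `client` supports this kind of guided decoding; the model name, import path, and input data are illustrative only:

import pandas as pd
from server import llm_ops  # hypothetical import path; adjust to this repo's package name

df = pd.DataFrame({'prompt': ['Is the sky blue? Answer yes or no.']})
out = llm_ops.ask_llm(df, model='llama3', accepted_regex='yes|no', max_tokens=5)
print(out['response'])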
server/lynxkite_ops.py  CHANGED
@@ -3,26 +3,29 @@ from . import ops
 import matplotlib
 import networkx as nx
 import pandas as pd
+import traceback
 
-
+op = ops.op_registration('LynxKite')
+
+@op("Import Parquet")
 def import_parquet(*, filename: str):
     '''Imports a parquet file.'''
     return pd.read_parquet(filename)
 
-@
+@op("Create scale-free graph")
 def create_scale_free_graph(*, nodes: int = 10):
     '''Creates a scale-free graph with the given number of nodes.'''
     return nx.scale_free_graph(nodes)
 
-@
+@op("Compute PageRank")
 @ops.nx_node_attribute_func('pagerank')
 def compute_pagerank(graph: nx.Graph, *, damping=0.85, iterations=100):
     return nx.pagerank(graph, alpha=damping, max_iter=iterations)
 
 
-@
-def
-'''
+@op("Sample graph")
+def sample_graph(graph: nx.Graph, *, nodes: int = 100):
+    '''Takes a subgraph.'''
     return nx.scale_free_graph(nodes)
 
 
@@ -32,10 +35,9 @@ def _map_color(value):
     rgba = cmap(value)
     return ['#{:02x}{:02x}{:02x}'.format(int(r*255), int(g*255), int(b*255)) for r, g, b in rgba[:, :3]]
 
-@
+@op("Visualize graph", view="visualization")
 def visualize_graph(graph: ops.Bundle, *, color_nodes_by: 'node_attribute' = None):
     nodes = graph.dfs['nodes'].copy()
-    node_attributes = sorted(nodes.columns)
     if color_nodes_by:
         nodes['color'] = _map_color(nodes[color_nodes_by])
     nodes = nodes.to_records()
@@ -76,7 +78,7 @@ def visualize_graph(graph: ops.Bundle, *, color_nodes_by: 'node_attribute' = None):
     }
     return v
 
-@
+@op("View tables", view="table_view")
 def view_tables(bundle: ops.Bundle):
     v = {
         'dataframes': { name: {
@@ -87,3 +89,44 @@ def view_tables(bundle: ops.Bundle):
         'other': bundle.other,
     }
     return v
+
+@ops.register_executor('LynxKite')
+def execute(ws):
+    catalog = ops.CATALOGS['LynxKite']
+    # Nodes are responsible for interpreting/executing their child nodes.
+    nodes = [n for n in ws.nodes if not n.parentId]
+    children = {}
+    for n in ws.nodes:
+        if n.parentId:
+            children.setdefault(n.parentId, []).append(n)
+    outputs = {}
+    failed = 0
+    while len(outputs) + failed < len(nodes):
+        for node in nodes:
+            if node.id in outputs:
+                continue
+            inputs = [edge.source for edge in ws.edges if edge.target == node.id]
+            if all(input in outputs for input in inputs):
+                inputs = [outputs[input] for input in inputs]
+                data = node.data
+                op = catalog[data.title]
+                params = {**data.params}
+                if op.sub_nodes:
+                    sub_nodes = children.get(node.id, [])
+                    sub_node_ids = [node.id for node in sub_nodes]
+                    sub_edges = [edge for edge in ws.edges if edge.source in sub_node_ids]
+                    params['sub_flow'] = {'nodes': sub_nodes, 'edges': sub_edges}
+                try:
+                    output = op(*inputs, **params)
+                except Exception as e:
+                    traceback.print_exc()
+                    data.error = str(e)
+                    failed += 1
+                    continue
+                if len(op.inputs) == 1 and op.inputs.get('multi') == '*':
+                    # It's a flexible input. Create n+1 handles.
+                    data.inputs = {f'input{i}': None for i in range(len(inputs) + 1)}
+                data.error = None
+                outputs[node.id] = output
+                if op.type == 'visualization' or op.type == 'table_view':
+                    data.view = output
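The new `execute` is the 'LynxKite' executor: it repeatedly scans the top-level nodes and runs any node whose upstream outputs are all available, so execution order follows the edges without an explicit topological sort, and failed nodes are counted so the loop still terminates. A simplified, self-contained model of that ready-inputs scheduling loop, using plain strings and lambdas instead of the real workspace objects:

# Toy version of the scheduling loop used by execute() above.
nodes = ['a', 'b', 'c']
edges = [('a', 'b'), ('b', 'c')]                # a -> b -> c
funcs = {'a': lambda: 1, 'b': lambda x: x + 1, 'c': lambda x: x * 10}

outputs, failed = {}, set()
while len(outputs) + len(failed) < len(nodes):
    for node in nodes:
        if node in outputs or node in failed:
            continue
        inputs = [src for src, dst in edges if dst == node]
        if all(i in outputs for i in inputs):
            try:
                outputs[node] = funcs[node](*[outputs[i] for i in inputs])
            except Exception:
                failed.add(node)

print(outputs)                                  # {'a': 1, 'b': 2, 'c': 20}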
server/lynxscribe_ops.py  CHANGED
@@ -1,6 +1,7 @@
 '''An example of passive ops. Just using LynxKite to describe the configuration of a complex system.'''
-from .ops import
+from .ops import passive_op_registration, Parameter as P, MULTI_INPUT
 
+reg = passive_op_registration('LynxScribe')
 reg('Scrape documents', params=[P.basic('url', '')])
 reg('Conversation logs')
 reg('Extract graph', inputs=['input'])
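With `reg` now bound to the 'LynxScribe' environment, adding another passive box stays a one-liner. A hypothetical extra registration following the same pattern (the box name and parameter are made up):

reg('Summarize logs', inputs=['input'], params=[P.basic('length', '100')])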
server/main.py  CHANGED
@@ -8,7 +8,7 @@ from . import workspace
 from . import lynxkite_ops
 # from . import networkx_ops
 # from . import pytorch_model_ops
-
+from . import lynxscribe_ops
 from . import llm_ops
 
 app = fastapi.FastAPI()
@@ -16,7 +16,9 @@ app = fastapi.FastAPI()
 
 @app.get("/api/catalog")
 def get_catalog():
-    return {
+    return {
+        k: {op.name: op.model_dump() for op in v.values()}
+        for k, v in ops.CATALOGS.items()}
 
 
 class SaveRequest(workspace.BaseConfig):
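`/api/catalog` now returns one catalog per environment instead of a single flat operation list, which is what the frontend uses to fill the environment selector and the per-workspace node search. A quick way to inspect the new shape against a running instance; host, port, and the exact keys depend on the deployment and on which op modules are imported, so this is a sketch only:

import requests

catalog = requests.get('http://localhost:8000/api/catalog').json()
print(list(catalog))              # e.g. ['LynxKite', 'LynxScribe', 'LLM logic']
print(list(catalog['LynxKite']))  # op names such as 'Import Parquet', 'Compute PageRank', ...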
server/networkx_ops.py  CHANGED
@@ -5,7 +5,7 @@ import inspect
 import networkx as nx
 
 
-def wrapped(func):
+def wrapped(name: str, func):
     @functools.wraps(func)
     def wrapper(*args, **kwargs):
         for k, v in kwargs.items():
@@ -20,24 +20,27 @@ def wrapped(func):
         return graph
     return wrapper
 
-
-    if hasattr(func, 'graphs'):
-        sig = inspect.signature(func)
-        inputs = {k: nx.Graph for k in func.graphs}
-        params = {
-            name: ops.Parameter.basic(
-                name, str(param.default)
-                if type(param.default) in [str, int, float]
-                else None,
-                param.annotation)
-            for name, param in sig.parameters.items()
-            if name not in ['G', 'backend', 'backend_kwargs']}
-        for p in params.values():
-            if not p.type:
-                # Guess the type based on the name.
-                if len(p.name) == 1:
-                    p.type = int
-        name = "NX › " + name.replace('_', ' ').title()
-        op = ops.Op(wrapped(func), name, params=params, inputs=inputs, outputs={'output': 'yes'}, type='basic')
-        ops.ALL_OPS[name] = op
+def register_networkx(env: str):
+    ops.CATALOGS.setdefault(env, {})
+    for (name, func) in nx.__dict__.items():
+        if hasattr(func, 'graphs'):
+            sig = inspect.signature(func)
+            inputs = {k: nx.Graph for k in func.graphs}
+            params = {
+                name: ops.Parameter.basic(
+                    name, str(param.default)
+                    if type(param.default) in [str, int, float]
+                    else None,
+                    param.annotation)
+                for name, param in sig.parameters.items()
+                if name not in ['G', 'backend', 'backend_kwargs']}
+            for p in params.values():
+                if not p.type:
+                    # Guess the type based on the name.
+                    if len(p.name) == 1:
+                        p.type = int
+            name = "NX › " + name.replace('_', ' ').title()
+            op = ops.Op(wrapped(name, func), name, params=params, inputs=inputs, outputs={'output': 'yes'}, type='basic')
+            ops.CATALOGS[env][name] = op
 
+register_networkx('LynxKite')
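`register_networkx` replaces the old module-level loop: it scans `nx.__dict__` for dispatchable functions (those exposing a `graphs` attribute), wraps them, and files them under the given environment's catalog instead of the removed global `ops.ALL_OPS`. A quick check of what it registered, assuming this module has been imported; the import path and the exact names depend on the installed NetworkX version:

from server import ops, networkx_ops  # hypothetical import path

nx_names = [n for n in ops.CATALOGS['LynxKite'] if n.startswith('NX › ')]
print(len(nx_names), nx_names[:3])    # e.g. 'NX › Pagerank', 'NX › Scale Free Graph', ...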
server/ops.py  CHANGED
@@ -10,7 +10,9 @@ import pydantic
 import typing
 from typing_extensions import Annotated
 
-
+CATALOGS = {}
+EXECUTORS = {}
+
 typeof = type # We have some arguments called "type".
 def type_to_json(t):
     if isinstance(t, type) and issubclass(t, enum.Enum):
@@ -166,7 +168,7 @@ def nx_node_attribute_func(name):
     return decorator
 
 
-def op(name, *, view='basic', sub_nodes=None):
+def op(env: str, name: str, *, view='basic', sub_nodes=None):
     '''Decorator for defining an operation.'''
     def decorator(func):
         sig = inspect.signature(func)
@@ -184,7 +186,8 @@ def op(name, *, view='basic', sub_nodes=None):
         if sub_nodes is not None:
             op.sub_nodes = sub_nodes
             op.type = 'sub_flow'
-
+        CATALOGS.setdefault(env, {})
+        CATALOGS[env][name] = op
         func.__op__ = op
         return func
     return decorator
@@ -212,7 +215,7 @@ def no_op(*args, **kwargs):
         return args[0]
     return Bundle()
 
-def register_passive_op(name, inputs=[], outputs=['output'], params=[]):
+def register_passive_op(env: str, name: str, inputs=[], outputs=['output'], params=[]):
     '''A passive operation has no associated code.'''
     op = Op(
         func=no_op,
@@ -224,10 +227,19 @@ def register_passive_op(name, inputs=[], outputs=['output'], params=[]):
         outputs=dict(
             (o, Output(name=o, type=None)) if isinstance(o, str)
             else (o.name, o) for o in outputs))
-
+    CATALOGS.setdefault(env, {})
+    CATALOGS[env][name] = op
     return op
 
-def
-'''
-
-
+def register_executor(env: str):
+    '''Decorator for registering an executor.'''
+    def decorator(func):
+        EXECUTORS[env] = func
+        return func
+    return decorator
+
+def op_registration(env: str):
+    return functools.partial(op, env)
+
+def passive_op_registration(env: str):
+    return functools.partial(register_passive_op, env)
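Together with the helpers at the bottom, `CATALOGS` and `EXECUTORS` are the whole environment mechanism: `op_registration(env)` and `passive_op_registration(env)` bind the existing decorators to one environment's catalog, and `register_executor(env)` records the function that `workspace.execute` dispatches to. A compact sketch of the pattern the other modules in this diff follow, with a made-up environment name and a trivial op:

from server import ops  # hypothetical import path

op = ops.op_registration('Demo env')

@op("Add one")
def add_one(*, x: int = 0):
    return x + 1

@ops.register_executor('Demo env')
def execute(ws):
    ...  # walk ws.nodes / ws.edges however this environment needs

assert 'Add one' in ops.CATALOGS['Demo env']
assert ops.EXECUTORS['Demo env'] is execute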
server/pytorch_model_ops.py  CHANGED
@@ -5,12 +5,14 @@ from . import ops
 
 LAYERS = {}
 
-
+op = ops.op_registration('LynxKite')
+
+@op("Define PyTorch model", sub_nodes=LAYERS)
 def define_pytorch_model(*, sub_flow):
     print('sub_flow:', sub_flow)
     return ops.Bundle(other={'model': str(sub_flow)})
 
-@
+@op("Train PyTorch model")
 def train_pytorch_model(model, graph):
     # import torch  # Lazy import because it's slow.
     return 'hello ' + str(model)
server/workspace.py  CHANGED
@@ -37,48 +37,14 @@ class WorkspaceEdge(BaseConfig):
     target: str
 
 class Workspace(BaseConfig):
+    env: str = ''
     nodes: list[WorkspaceNode] = dataclasses.field(default_factory=list)
     edges: list[WorkspaceEdge] = dataclasses.field(default_factory=list)
 
 
-def execute(ws):
-
-
-    children = {}
-    for n in ws.nodes:
-        if n.parentId:
-            children.setdefault(n.parentId, []).append(n)
-    outputs = {}
-    failed = 0
-    while len(outputs) + failed < len(nodes):
-        for node in nodes:
-            if node.id in outputs:
-                continue
-            inputs = [edge.source for edge in ws.edges if edge.target == node.id]
-            if all(input in outputs for input in inputs):
-                inputs = [outputs[input] for input in inputs]
-                data = node.data
-                op = ops.ALL_OPS[data.title]
-                params = {**data.params}
-                if op.sub_nodes:
-                    sub_nodes = children.get(node.id, [])
-                    sub_node_ids = [node.id for node in sub_nodes]
-                    sub_edges = [edge for edge in ws.edges if edge.source in sub_node_ids]
-                    params['sub_flow'] = {'nodes': sub_nodes, 'edges': sub_edges}
-                try:
-                    output = op(*inputs, **params)
-                except Exception as e:
-                    traceback.print_exc()
-                    data.error = str(e)
-                    failed += 1
-                    continue
-                if len(op.inputs) == 1 and op.inputs.get('multi') == '*':
-                    # It's a flexible input. Create n+1 handles.
-                    data.inputs = {f'input{i}': None for i in range(len(inputs) + 1)}
-                data.error = None
-                outputs[node.id] = output
-                if op.type == 'visualization' or op.type == 'table_view':
-                    data.view = output
+def execute(ws: Workspace):
+    if ws.env in ops.EXECUTORS:
+        ops.EXECUTORS[ws.env](ws)
 
 
 def save(ws: Workspace, path: str):
@@ -101,6 +67,7 @@ def load(path: str):
 
 
 def _update_metadata(ws):
+    catalog = ops.CATALOGS.get(ws.env, {})
     nodes = {node.id: node for node in ws.nodes}
     done = set()
     while len(done) < len(nodes):
@@ -109,7 +76,7 @@ def _update_metadata(ws):
             continue
         data = node.data
         if node.parentId is None:
-            op =
+            op = catalog.get(data.title)
         elif node.parentId not in nodes:
             data.error = f'Parent not found: {node.parentId}'
             done.add(node.id)
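Execution is now dispatched on the workspace's new `env` field, and `_update_metadata` looks ops up in the matching per-environment catalog, so a workspace with an unknown or empty environment is simply left alone instead of hitting the removed global registry. A short sketch of the flow, with a hypothetical saved-workspace path:

from server import ops, workspace  # hypothetical import path

ws = workspace.load('workspaces/example.json')  # path is illustrative
print(ws.env)                                   # e.g. 'LynxKite'
if ws.env in ops.EXECUTORS:                     # same guard execute() applies
    workspace.execute(ws)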
web/src/LynxKiteFlow.svelte  CHANGED
@@ -4,7 +4,6 @@
   import {
     SvelteFlow,
     Controls,
-    Background,
     MiniMap,
     MarkerType,
     useSvelteFlow,
@@ -14,6 +13,9 @@
     type Connection,
     type NodeTypes,
   } from '@xyflow/svelte';
+  import ArrowBack from 'virtual:icons/tabler/arrow-back'
+  import Backspace from 'virtual:icons/tabler/backspace'
+  import Atom from 'virtual:icons/tabler/Atom'
   import { useQuery, useMutation, useQueryClient } from '@sveltestack/svelte-query';
   import NodeWithParams from './NodeWithParams.svelte';
   import NodeWithVisualization from './NodeWithVisualization.svelte';
@@ -21,6 +23,7 @@
   import NodeWithSubFlow from './NodeWithSubFlow.svelte';
   import NodeWithArea from './NodeWithArea.svelte';
   import NodeSearch from './NodeSearch.svelte';
+  import EnvironmentSelector from './EnvironmentSelector.svelte';
   import '@xyflow/svelte/dist/style.css';
 
   export let path = '';
@@ -42,7 +45,7 @@
       return await res.json();
     }, {
       onSuccess: data => queryClient.setQueryData(['workspace', path], data),
-    })
+    });
 
   const nodeTypes: NodeTypes = {
     basic: NodeWithParams,
@@ -73,7 +76,7 @@
     event.preventDefault();
     nodeSearchSettings = {
       pos: { x: event.clientX, y: event.clientY },
-      boxes: $
+      boxes: $catalog.data[$backendWorkspace.data?.env],
     };
   }
   function addNode(e) {
@@ -106,13 +109,10 @@
     });
     closeNodeSearch();
   }
-  const
-  async function getBoxes() {
+  const catalog = useQuery(['catalog'], async () => {
     const res = await fetch('/api/catalog');
-
-
-  }
-  getBoxes();
+    return res.json();
+  }, {staleTime: 60000, retry: false});
 
   let nodeSearchSettings: {
     pos: XYPosition,
@@ -153,6 +153,7 @@
       delete node.markerEnd;
       delete node.selected;
     }
+    g.env = $backendWorkspace.data?.env;
    const ws = orderedJSON(g);
    const bd = orderedJSON($backendWorkspace.data);
    if (ws === bd) return;
@@ -182,23 +183,74 @@
        parentId: node.id,
      };
   }
+  $: parentDir = path.split('/').slice(0, -1).join('/');
 
 </script>
 
-<div
-<
-
-
-
-
-
-
-
-
-
-
-
+<div class="page">
+  <div class="top-bar">
+    <div class="ws-name">
+      <a href><img src="/favicon.ico"></a>
+      {path}
+    </div>
+    <div class="tools">
+      <EnvironmentSelector
+        options={Object.keys($catalog.data || {})}
+        value={$backendWorkspace.data?.env}
+        onChange={(env) => $mutation.mutate({ path, ws: { ...$backendWorkspace.data, env } })}
+        />
+      <a href><Atom /></a>
+      <a href><Backspace /></a>
+      <a href="#dir?path={parentDir}"><ArrowBack /></a>
+    </div>
+  </div>
+  <div style:height="100%">
+    <SvelteFlow {nodes} {edges} {nodeTypes} fitView
+      on:paneclick={toggleNodeSearch}
+      on:nodeclick={nodeClick}
+      proOptions={{ hideAttribution: true }}
+      maxZoom={3}
+      minZoom={0.3}
+      onconnect={onconnect}
+      defaultEdgeOptions={{ markerEnd: { type: MarkerType.Arrow } }}
+      >
+      <Controls />
+      <MiniMap />
+      {#if nodeSearchSettings}
        <NodeSearch pos={nodeSearchSettings.pos} boxes={nodeSearchSettings.boxes} on:cancel={closeNodeSearch} on:add={addNode} />
-
-
+      {/if}
+    </SvelteFlow>
+  </div>
 </div>
+
+<style>
+  .top-bar {
+    display: flex;
+    justify-content: space-between;
+    background: oklch(30% 0.13 230);
+    color: white;
+  }
+  .ws-name {
+    font-size: 1.5em;
+  }
+  .ws-name img {
+    height: 1.5em;
+    vertical-align: middle;
+    margin: 4px;
+  }
+  .page {
+    display: flex;
+    flex-direction: column;
+    height: 100vh;
+  }
+
+  .tools {
+    display: flex;
+    align-items: center;
+  }
+  .tools a {
+    color: oklch(75% 0.13 230);
+    font-size: 1.5em;
+    padding: 0 10px;
+  }
+</style>
web/src/Workspace.svelte  CHANGED
@@ -2,62 +2,13 @@
   // This is the whole LynxKite workspace editor page.
   import { QueryClient, QueryClientProvider } from '@sveltestack/svelte-query'
   import { SvelteFlowProvider } from '@xyflow/svelte';
-  import ArrowBack from 'virtual:icons/tabler/arrow-back'
-  import Backspace from 'virtual:icons/tabler/backspace'
-  import Atom from 'virtual:icons/tabler/Atom'
   import LynxKiteFlow from './LynxKiteFlow.svelte';
   export let path = '';
-  $: parent = path.split('/').slice(0, -1).join('/');
   const queryClient = new QueryClient()
 </script>
 
 <QueryClientProvider client={queryClient}>
-  <
-  <
-
-      <a href><img src="/favicon.ico"></a>
-      {path}
-    </div>
-    <div class="tools">
-      <a href><Atom /></a>
-      <a href><Backspace /></a>
-      <a href="#dir?path={parent}"><ArrowBack /></a>
-    </div>
-  </div>
-  <SvelteFlowProvider>
-    <LynxKiteFlow path={path} />
-  </SvelteFlowProvider>
-</div>
+  <SvelteFlowProvider>
+    <LynxKiteFlow path={path} />
+  </SvelteFlowProvider>
 </QueryClientProvider>
-
-<style>
-  .top-bar {
-    display: flex;
-    justify-content: space-between;
-    background: oklch(30% 0.13 230);
-    color: white;
-  }
-  .ws-name {
-    font-size: 1.5em;
-  }
-  .ws-name img {
-    height: 1.5em;
-    vertical-align: middle;
-    margin: 4px;
-  }
-  .page {
-    display: flex;
-    flex-direction: column;
-    height: 100vh;
-  }
-
-  .tools {
-    display: flex;
-    align-items: center;
-  }
-  .tools a {
-    color: oklch(75% 0.13 230);
-    font-size: 1.5em;
-    padding: 0 10px;
-  }
-</style>