darabos committed on
Commit 15bcc0e · 1 Parent(s): c4b5e4b

Remove sub_nodes/sub_flow/parentId.

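In short: operations no longer declare sub_nodes, the sub_flow node type is gone, and workspace nodes no longer carry a parentId, so the LynxKite executor schedules every node in the workspace directly instead of splitting them into top-level nodes and their children. A minimal sketch of the simplified shape (illustration only, paraphrased from the diff below; not code from the commit):

    import typing
    import pydantic

    class WorkspaceNode(pydantic.BaseModel):
        id: str
        type: str
        # parentId is gone: nodes are no longer nested inside a parent node.

    async def execute(ws):
        outputs: dict[str, typing.Any] = {}
        failed = 0
        # Previously only nodes without a parentId were scheduled here and the
        # children were grouped by parent; now every node is handled uniformly.
        while len(outputs) + failed < len(ws.nodes):
            for node in ws.nodes:
                if node.id in outputs:
                    continue
                ...  # look up the op in the catalog and run it, as in server/lynxkite_ops.py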
Files changed (5)
  1. server/lynxkite_ops.py +177 -148
  2. server/ops.py +184 -149
  3. server/workspace.py +16 -17
  4. web/src/apiTypes.ts +0 -1
  5. web/src/index.css +44 -6
server/lynxkite_ops.py CHANGED
@@ -1,4 +1,5 @@
1
- '''Some operations. To be split into separate files when we have more.'''
 
2
  from . import ops
3
  from collections import deque
4
  import dataclasses
@@ -9,72 +10,85 @@ import pandas as pd
9
  import traceback
10
  import typing
11
 
12
- op = ops.op_registration('LynxKite')
 
13
 
14
  @dataclasses.dataclass
15
  class RelationDefinition:
16
- '''Defines a set of edges.'''
17
- df: str # The DataFrame that contains the edges.
18
- source_column: str # The column in the edge DataFrame that contains the source node ID.
19
- target_column: str # The column in the edge DataFrame that contains the target node ID.
20
- source_table: str # The DataFrame that contains the source nodes.
21
- target_table: str # The DataFrame that contains the target nodes.
22
- source_key: str # The column in the source table that contains the node ID.
23
- target_key: str # The column in the target table that contains the node ID.
24
 
25
  @dataclasses.dataclass
26
  class Bundle:
27
- '''A collection of DataFrames and other data.
28
-
29
- Can efficiently represent a knowledge graph (homogeneous or heterogeneous) or tabular data.
30
- It can also carry other data, such as a trained model.
31
- '''
32
- dfs: dict[str, pd.DataFrame] = dataclasses.field(default_factory=dict)
33
- relations: list[RelationDefinition] = dataclasses.field(default_factory=list)
34
- other: dict[str, typing.Any] = None
35
-
36
- @classmethod
37
- def from_nx(cls, graph: nx.Graph):
38
- edges = nx.to_pandas_edgelist(graph)
39
- d = dict(graph.nodes(data=True))
40
- nodes = pd.DataFrame(d.values(), index=d.keys())
41
- nodes['id'] = nodes.index
42
- return cls(
43
- dfs={'edges': edges, 'nodes': nodes},
44
- relations=[
45
- RelationDefinition(
46
- df='edges',
47
- source_column='source',
48
- target_column='target',
49
- source_table='nodes',
50
- target_table='nodes',
51
- source_key='id',
52
- target_key='id',
53
  )
54
- ]
55
- )
56
 
57
- def to_nx(self):
58
- graph = nx.from_pandas_edgelist(self.dfs['edges'])
59
- nx.set_node_attributes(graph, self.dfs['nodes'].set_index('id').to_dict('index'))
60
- return graph
 
 
61
 
62
 
63
  def nx_node_attribute_func(name):
64
- '''Decorator for wrapping a function that adds a NetworkX node attribute.'''
65
- def decorator(func):
66
- @functools.wraps(func)
67
- def wrapper(graph: nx.Graph, **kwargs):
68
- graph = graph.copy()
69
- attr = func(graph, **kwargs)
70
- nx.set_node_attributes(graph, attr, name)
71
- return graph
72
- return wrapper
73
- return decorator
74
 
75
 
76
  def disambiguate_edges(ws):
77
- '''If an input plug is connected to multiple edges, keep only the last edge.'''
78
  seen = set()
79
  for edge in reversed(ws.edges):
80
  if (edge.target, edge.targetHandle) in seen:
@@ -82,20 +96,15 @@ def disambiguate_edges(ws):
82
  seen.add((edge.target, edge.targetHandle))
83
 
84
 
85
- @ops.register_executor('LynxKite')
86
  async def execute(ws):
87
- catalog = ops.CATALOGS['LynxKite']
88
  # Nodes are responsible for interpreting/executing their child nodes.
89
- nodes = [n for n in ws.nodes if not n.parentId]
90
  disambiguate_edges(ws)
91
- children = {}
92
- for n in ws.nodes:
93
- if n.parentId:
94
- children.setdefault(n.parentId, []).append(n)
95
  outputs = {}
96
  failed = 0
97
- while len(outputs) + failed < len(nodes):
98
- for node in nodes:
99
  if node.id in outputs:
100
  continue
101
  # TODO: Take the input/output handles into account.
@@ -107,118 +116,138 @@ async def execute(ws):
107
  params = {**data.params}
108
  # Convert inputs.
109
  for i, (x, p) in enumerate(zip(inputs, op.inputs.values())):
110
- if p.type == nx.Graph and isinstance(x, Bundle):
111
- inputs[i] = x.to_nx()
112
- elif p.type == Bundle and isinstance(x, nx.Graph):
113
- inputs[i] = Bundle.from_nx(x)
114
  try:
115
- output = op(*inputs, **params)
116
  except Exception as e:
117
- traceback.print_exc()
118
- data.error = str(e)
119
- failed += 1
120
- continue
121
- if len(op.inputs) == 1 and op.inputs.get('multi') == '*':
122
  # It's a flexible input. Create n+1 handles.
123
- data.inputs = {f'input{i}': None for i in range(len(inputs) + 1)}
124
  data.error = None
125
  outputs[node.id] = output
126
- if op.type == 'visualization' or op.type == 'table_view' or op.type == 'image':
127
  data.display = output
128
 
 
129
  @op("Import Parquet")
130
  def import_parquet(*, filename: str):
131
- '''Imports a parquet file.'''
132
- return pd.read_parquet(filename)
 
133
 
134
  @op("Create scale-free graph")
135
  def create_scale_free_graph(*, nodes: int = 10):
136
- '''Creates a scale-free graph with the given number of nodes.'''
137
- return nx.scale_free_graph(nodes)
 
138
 
139
  @op("Compute PageRank")
140
- @nx_node_attribute_func('pagerank')
141
  def compute_pagerank(graph: nx.Graph, *, damping=0.85, iterations=100):
142
- return nx.pagerank(graph, alpha=damping, max_iter=iterations)
 
143
 
144
  @op("Discard loop edges")
145
  def discard_loop_edges(graph: nx.Graph):
146
- graph = graph.copy()
147
- graph.remove_edges_from(nx.selfloop_edges(graph))
148
- return graph
 
149
 
150
  @op("Sample graph")
151
  def sample_graph(graph: nx.Graph, *, nodes: int = 100):
152
- '''Takes a (preferably connected) subgraph.'''
153
- sample = set()
154
- to_expand = deque([0])
155
- while to_expand and len(sample) < nodes:
156
- node = to_expand.pop()
157
- for n in graph.neighbors(node):
158
- if n not in sample:
159
- sample.add(n)
160
- to_expand.append(n)
161
- if len(sample) == nodes:
162
- break
163
- return nx.Graph(graph.subgraph(sample))
 
164
 
165
  def _map_color(value):
166
- cmap = matplotlib.cm.get_cmap('viridis')
167
- value = (value - value.min()) / (value.max() - value.min())
168
- rgba = cmap(value)
169
- return ['#{:02x}{:02x}{:02x}'.format(int(r*255), int(g*255), int(b*255)) for r, g, b in rgba[:, :3]]
170
 
171
  @op("Visualize graph", view="visualization")
172
  def visualize_graph(graph: Bundle, *, color_nodes_by: ops.NodeAttribute = None):
173
- nodes = graph.dfs['nodes'].copy()
174
- if color_nodes_by:
175
- nodes['color'] = _map_color(nodes[color_nodes_by])
176
- nodes = nodes.to_records()
177
- edges = graph.dfs['edges'].drop_duplicates(['source', 'target'])
178
- edges = edges.to_records()
179
- pos = nx.spring_layout(graph.to_nx(), iterations=max(1, int(10000/len(nodes))))
180
- v = {
181
- 'animationDuration': 500,
182
- 'animationEasingUpdate': 'quinticInOut',
183
- 'series': [
184
- {
185
- 'type': 'graph',
186
- 'roam': True,
187
- 'lineStyle': {
188
- 'color': 'gray',
189
- 'curveness': 0.3,
190
- },
191
- 'emphasis': {
192
- 'focus': 'adjacency',
193
- 'lineStyle': {
194
- 'width': 10,
195
- }
196
- },
197
- 'data': [
198
- {
199
- 'id': str(n.id),
200
- 'x': float(pos[n.id][0]), 'y': float(pos[n.id][1]),
201
- # Adjust node size to cover the same area no matter how many nodes there are.
202
- 'symbolSize': 50 / len(nodes) ** 0.5,
203
- 'itemStyle': {'color': n.color} if color_nodes_by else {},
204
- }
205
- for n in nodes],
206
- 'links': [
207
- {'source': str(r.source), 'target': str(r.target)}
208
- for r in edges],
209
- },
210
- ],
211
- }
212
- return v
213
 
214
  @op("View tables", view="table_view")
215
  def view_tables(bundle: Bundle):
216
- v = {
217
- 'dataframes': { name: {
218
- 'columns': [str(c) for c in df.columns],
219
- 'data': df.values.tolist(),
220
- } for name, df in bundle.dfs.items() },
221
- 'relations': bundle.relations,
222
- 'other': bundle.other,
223
- }
224
- return v
1
+ """Some operations. To be split into separate files when we have more."""
2
+
3
  from . import ops
4
  from collections import deque
5
  import dataclasses
 
10
  import traceback
11
  import typing
12
 
13
+ op = ops.op_registration("LynxKite")
14
+
15
 
16
  @dataclasses.dataclass
17
  class RelationDefinition:
18
+ """Defines a set of edges."""
19
+
20
+ df: str # The DataFrame that contains the edges.
21
+ source_column: (
22
+ str # The column in the edge DataFrame that contains the source node ID.
23
+ )
24
+ target_column: (
25
+ str # The column in the edge DataFrame that contains the target node ID.
26
+ )
27
+ source_table: str # The DataFrame that contains the source nodes.
28
+ target_table: str # The DataFrame that contains the target nodes.
29
+ source_key: str # The column in the source table that contains the node ID.
30
+ target_key: str # The column in the target table that contains the node ID.
31
+
32
 
33
  @dataclasses.dataclass
34
  class Bundle:
35
+ """A collection of DataFrames and other data.
36
+
37
+ Can efficiently represent a knowledge graph (homogeneous or heterogeneous) or tabular data.
38
+ It can also carry other data, such as a trained model.
39
+ """
40
+
41
+ dfs: dict[str, pd.DataFrame] = dataclasses.field(default_factory=dict)
42
+ relations: list[RelationDefinition] = dataclasses.field(default_factory=list)
43
+ other: dict[str, typing.Any] = None
44
+
45
+ @classmethod
46
+ def from_nx(cls, graph: nx.Graph):
47
+ edges = nx.to_pandas_edgelist(graph)
48
+ d = dict(graph.nodes(data=True))
49
+ nodes = pd.DataFrame(d.values(), index=d.keys())
50
+ nodes["id"] = nodes.index
51
+ return cls(
52
+ dfs={"edges": edges, "nodes": nodes},
53
+ relations=[
54
+ RelationDefinition(
55
+ df="edges",
56
+ source_column="source",
57
+ target_column="target",
58
+ source_table="nodes",
59
+ target_table="nodes",
60
+ source_key="id",
61
+ target_key="id",
62
+ )
63
+ ],
64
  )
 
 
65
 
66
+ def to_nx(self):
67
+ graph = nx.from_pandas_edgelist(self.dfs["edges"])
68
+ nx.set_node_attributes(
69
+ graph, self.dfs["nodes"].set_index("id").to_dict("index")
70
+ )
71
+ return graph
72
 
73
 
74
  def nx_node_attribute_func(name):
75
+ """Decorator for wrapping a function that adds a NetworkX node attribute."""
76
+
77
+ def decorator(func):
78
+ @functools.wraps(func)
79
+ def wrapper(graph: nx.Graph, **kwargs):
80
+ graph = graph.copy()
81
+ attr = func(graph, **kwargs)
82
+ nx.set_node_attributes(graph, attr, name)
83
+ return graph
84
+
85
+ return wrapper
86
+
87
+ return decorator
88
 
89
 
90
  def disambiguate_edges(ws):
91
+ """If an input plug is connected to multiple edges, keep only the last edge."""
92
  seen = set()
93
  for edge in reversed(ws.edges):
94
  if (edge.target, edge.targetHandle) in seen:
 
96
  seen.add((edge.target, edge.targetHandle))
97
 
98
 
99
+ @ops.register_executor("LynxKite")
100
  async def execute(ws):
101
+ catalog = ops.CATALOGS["LynxKite"]
102
  # Nodes are responsible for interpreting/executing their child nodes.
 
103
  disambiguate_edges(ws)
104
  outputs = {}
105
  failed = 0
106
+ while len(outputs) + failed < len(ws.nodes):
107
+ for node in ws.nodes:
108
  if node.id in outputs:
109
  continue
110
  # TODO: Take the input/output handles into account.
 
116
  params = {**data.params}
117
  # Convert inputs.
118
  for i, (x, p) in enumerate(zip(inputs, op.inputs.values())):
119
+ if p.type == nx.Graph and isinstance(x, Bundle):
120
+ inputs[i] = x.to_nx()
121
+ elif p.type == Bundle and isinstance(x, nx.Graph):
122
+ inputs[i] = Bundle.from_nx(x)
123
  try:
124
+ output = op(*inputs, **params)
125
  except Exception as e:
126
+ traceback.print_exc()
127
+ data.error = str(e)
128
+ failed += 1
129
+ continue
130
+ if len(op.inputs) == 1 and op.inputs.get("multi") == "*":
131
  # It's a flexible input. Create n+1 handles.
132
+ data.inputs = {f"input{i}": None for i in range(len(inputs) + 1)}
133
  data.error = None
134
  outputs[node.id] = output
135
+ if (
136
+ op.type == "visualization"
137
+ or op.type == "table_view"
138
+ or op.type == "image"
139
+ ):
140
  data.display = output
141
 
142
+
143
  @op("Import Parquet")
144
  def import_parquet(*, filename: str):
145
+ """Imports a parquet file."""
146
+ return pd.read_parquet(filename)
147
+
148
 
149
  @op("Create scale-free graph")
150
  def create_scale_free_graph(*, nodes: int = 10):
151
+ """Creates a scale-free graph with the given number of nodes."""
152
+ return nx.scale_free_graph(nodes)
153
+
154
 
155
  @op("Compute PageRank")
156
+ @nx_node_attribute_func("pagerank")
157
  def compute_pagerank(graph: nx.Graph, *, damping=0.85, iterations=100):
158
+ return nx.pagerank(graph, alpha=damping, max_iter=iterations)
159
+
160
 
161
  @op("Discard loop edges")
162
  def discard_loop_edges(graph: nx.Graph):
163
+ graph = graph.copy()
164
+ graph.remove_edges_from(nx.selfloop_edges(graph))
165
+ return graph
166
+
167
 
168
  @op("Sample graph")
169
  def sample_graph(graph: nx.Graph, *, nodes: int = 100):
170
+ """Takes a (preferably connected) subgraph."""
171
+ sample = set()
172
+ to_expand = deque([0])
173
+ while to_expand and len(sample) < nodes:
174
+ node = to_expand.pop()
175
+ for n in graph.neighbors(node):
176
+ if n not in sample:
177
+ sample.add(n)
178
+ to_expand.append(n)
179
+ if len(sample) == nodes:
180
+ break
181
+ return nx.Graph(graph.subgraph(sample))
182
+
183
 
184
  def _map_color(value):
185
+ cmap = matplotlib.cm.get_cmap("viridis")
186
+ value = (value - value.min()) / (value.max() - value.min())
187
+ rgba = cmap(value)
188
+ return [
189
+ "#{:02x}{:02x}{:02x}".format(int(r * 255), int(g * 255), int(b * 255))
190
+ for r, g, b in rgba[:, :3]
191
+ ]
192
+
193
 
194
  @op("Visualize graph", view="visualization")
195
  def visualize_graph(graph: Bundle, *, color_nodes_by: ops.NodeAttribute = None):
196
+ nodes = graph.dfs["nodes"].copy()
197
+ if color_nodes_by:
198
+ nodes["color"] = _map_color(nodes[color_nodes_by])
199
+ nodes = nodes.to_records()
200
+ edges = graph.dfs["edges"].drop_duplicates(["source", "target"])
201
+ edges = edges.to_records()
202
+ pos = nx.spring_layout(graph.to_nx(), iterations=max(1, int(10000 / len(nodes))))
203
+ v = {
204
+ "animationDuration": 500,
205
+ "animationEasingUpdate": "quinticInOut",
206
+ "series": [
207
+ {
208
+ "type": "graph",
209
+ "roam": True,
210
+ "lineStyle": {
211
+ "color": "gray",
212
+ "curveness": 0.3,
213
+ },
214
+ "emphasis": {
215
+ "focus": "adjacency",
216
+ "lineStyle": {
217
+ "width": 10,
218
+ },
219
+ },
220
+ "data": [
221
+ {
222
+ "id": str(n.id),
223
+ "x": float(pos[n.id][0]),
224
+ "y": float(pos[n.id][1]),
225
+ # Adjust node size to cover the same area no matter how many nodes there are.
226
+ "symbolSize": 50 / len(nodes) ** 0.5,
227
+ "itemStyle": {"color": n.color} if color_nodes_by else {},
228
+ }
229
+ for n in nodes
230
+ ],
231
+ "links": [
232
+ {"source": str(r.source), "target": str(r.target)} for r in edges
233
+ ],
234
+ },
235
+ ],
236
+ }
237
+ return v
238
+
239
 
240
  @op("View tables", view="table_view")
241
  def view_tables(bundle: Bundle):
242
+ v = {
243
+ "dataframes": {
244
+ name: {
245
+ "columns": [str(c) for c in df.columns],
246
+ "data": df.values.tolist(),
247
+ }
248
+ for name, df in bundle.dfs.items()
249
+ },
250
+ "relations": bundle.relations,
251
+ "other": bundle.other,
252
+ }
253
+ return v
server/ops.py CHANGED
@@ -1,4 +1,5 @@
1
- '''API for implementing LynxKite operations.'''
 
2
  from __future__ import annotations
3
  import enum
4
  import functools
@@ -10,180 +11,214 @@ from typing_extensions import Annotated
10
  CATALOGS = {}
11
  EXECUTORS = {}
12
 
13
- typeof = type # We have some arguments called "type".
 
 
14
  def type_to_json(t):
15
- if isinstance(t, type) and issubclass(t, enum.Enum):
16
- return {'enum': list(t.__members__.keys())}
17
- if getattr(t, '__metadata__', None):
18
- return t.__metadata__[-1]
19
- return {'type': str(t)}
20
- Type = Annotated[
21
- typing.Any, pydantic.PlainSerializer(type_to_json, return_type=dict)
22
- ]
23
- LongStr = Annotated[
24
- str, {'format': 'textarea'}
25
- ]
26
- PathStr = Annotated[
27
- str, {'format': 'path'}
28
- ]
29
- CollapsedStr = Annotated[
30
- str, {'format': 'collapsed'}
31
- ]
32
- NodeAttribute = Annotated[
33
- str, {'format': 'node attribute'}
34
- ]
35
- EdgeAttribute = Annotated[
36
- str, {'format': 'edge attribute'}
37
- ]
38
  class BaseConfig(pydantic.BaseModel):
39
- model_config = pydantic.ConfigDict(
40
- arbitrary_types_allowed=True,
41
- )
42
 
43
 
44
  class Parameter(BaseConfig):
45
- '''Defines a parameter for an operation.'''
46
- name: str
47
- default: typing.Any
48
- type: Type = None
49
-
50
- @staticmethod
51
- def options(name, options, default=None):
52
- e = enum.Enum(f'OptionsFor_{name}', options)
53
- return Parameter.basic(name, e[default or options[0]], e)
54
-
55
- @staticmethod
56
- def collapsed(name, default, type=None):
57
- return Parameter.basic(name, default, CollapsedStr)
58
-
59
- @staticmethod
60
- def basic(name, default=None, type=None):
61
- if default is inspect._empty:
62
- default = None
63
- if type is None or type is inspect._empty:
64
- type = typeof(default) if default is not None else None
65
- return Parameter(name=name, default=default, type=type)
 
 
66
 
67
  class Input(BaseConfig):
68
- name: str
69
- type: Type
70
- position: str = 'left'
 
71
 
72
  class Output(BaseConfig):
73
- name: str
74
- type: Type
75
- position: str = 'right'
76
 
77
- MULTI_INPUT = Input(name='multi', type='*')
78
  def basic_inputs(*names):
79
- return {name: Input(name=name, type=None) for name in names}
 
 
80
  def basic_outputs(*names):
81
- return {name: Output(name=name, type=None) for name in names}
82
 
83
 
84
  class Op(BaseConfig):
85
- func: typing.Callable = pydantic.Field(exclude=True)
86
- name: str
87
- params: dict[str, Parameter]
88
- inputs: dict[str, Input]
89
- outputs: dict[str, Output]
90
- type: str = 'basic' # The UI to use for this operation.
91
- sub_nodes: list[Op] = None # If set, these nodes can be placed inside the operation's node.
92
-
93
- def __call__(self, *inputs, **params):
94
- # Convert parameters.
95
- for p in params:
96
- if p in self.params:
97
- if self.params[p].type == int:
98
- params[p] = int(params[p])
99
- elif self.params[p].type == float:
100
- params[p] = float(params[p])
101
- elif isinstance(self.params[p].type, enum.EnumMeta):
102
- params[p] = self.params[p].type[params[p]]
103
- res = self.func(*inputs, **params)
104
- return res
105
-
106
-
107
- def op(env: str, name: str, *, view='basic', sub_nodes=None, outputs=None):
108
- '''Decorator for defining an operation.'''
109
- def decorator(func):
110
- sig = inspect.signature(func)
111
- # Positional arguments are inputs.
112
- inputs = {
113
- name: Input(name=name, type=param.annotation)
114
- for name, param in sig.parameters.items()
115
- if param.kind != param.KEYWORD_ONLY}
116
- params = {}
117
- for n, param in sig.parameters.items():
118
- if param.kind == param.KEYWORD_ONLY and not n.startswith('_'):
119
- params[n] = Parameter.basic(n, param.default, param.annotation)
120
- if outputs:
121
- _outputs = {name: Output(name=name, type=None) for name in outputs}
122
- else:
123
- _outputs = {'output': Output(name='output', type=None)} if view == 'basic' else {}
124
- op = Op(func=func, name=name, params=params, inputs=inputs, outputs=_outputs, type=view)
125
- if sub_nodes is not None:
126
- op.sub_nodes = sub_nodes
127
- op.type = 'sub_flow'
128
- CATALOGS.setdefault(env, {})
129
- CATALOGS[env][name] = op
130
- func.__op__ = op
131
- return func
132
- return decorator
 
133
 
134
  def input_position(**kwargs):
135
- '''Decorator for specifying unusual positions for the inputs.'''
136
- def decorator(func):
137
- op = func.__op__
138
- for k, v in kwargs.items():
139
- op.inputs[k].position = v
140
- return func
141
- return decorator
 
 
143
  def output_position(**kwargs):
144
- '''Decorator for specifying unusual positions for the outputs.'''
145
- def decorator(func):
146
- op = func.__op__
147
- for k, v in kwargs.items():
148
- op.outputs[k].position = v
149
- return func
150
- return decorator
 
 
152
  def no_op(*args, **kwargs):
153
- if args:
154
- return args[0]
155
- return None
156
-
157
- def register_passive_op(env: str, name: str, inputs=[], outputs=['output'], params=[]):
158
- '''A passive operation has no associated code.'''
159
- op = Op(
160
- func=no_op,
161
- name=name,
162
- params={p.name: p for p in params},
163
- inputs=dict(
164
- (i, Input(name=i, type=None)) if isinstance(i, str)
165
- else (i.name, i) for i in inputs),
166
- outputs=dict(
167
- (o, Output(name=o, type=None)) if isinstance(o, str)
168
- else (o.name, o) for o in outputs))
169
- CATALOGS.setdefault(env, {})
170
- CATALOGS[env][name] = op
171
- return op
172
 
173
  def register_executor(env: str):
174
- '''Decorator for registering an executor.'''
175
- def decorator(func):
176
- EXECUTORS[env] = func
177
- return func
178
- return decorator
 
 
180
  def op_registration(env: str):
181
- return functools.partial(op, env)
 
182
 
183
  def passive_op_registration(env: str):
184
- return functools.partial(register_passive_op, env)
 
185
 
186
  def register_area(env, name, params=[]):
187
- '''A node that represents an area. It can contain other nodes, but does not restrict movement in any way.'''
188
- op = Op(func=no_op, name=name, params={p.name: p for p in params}, inputs={}, outputs={}, type='area')
189
- CATALOGS[env][name] = op
 
1
+ """API for implementing LynxKite operations."""
2
+
3
  from __future__ import annotations
4
  import enum
5
  import functools
 
11
  CATALOGS = {}
12
  EXECUTORS = {}
13
 
14
+ typeof = type # We have some arguments called "type".
15
+
16
+
17
  def type_to_json(t):
18
+ if isinstance(t, type) and issubclass(t, enum.Enum):
19
+ return {"enum": list(t.__members__.keys())}
20
+ if getattr(t, "__metadata__", None):
21
+ return t.__metadata__[-1]
22
+ return {"type": str(t)}
23
+
24
+
25
+ Type = Annotated[typing.Any, pydantic.PlainSerializer(type_to_json, return_type=dict)]
26
+ LongStr = Annotated[str, {"format": "textarea"}]
27
+ PathStr = Annotated[str, {"format": "path"}]
28
+ CollapsedStr = Annotated[str, {"format": "collapsed"}]
29
+ NodeAttribute = Annotated[str, {"format": "node attribute"}]
30
+ EdgeAttribute = Annotated[str, {"format": "edge attribute"}]
31
+
32
+
33
  class BaseConfig(pydantic.BaseModel):
34
+ model_config = pydantic.ConfigDict(
35
+ arbitrary_types_allowed=True,
36
+ )
37
 
38
 
39
  class Parameter(BaseConfig):
40
+ """Defines a parameter for an operation."""
41
+
42
+ name: str
43
+ default: typing.Any
44
+ type: Type = None
45
+
46
+ @staticmethod
47
+ def options(name, options, default=None):
48
+ e = enum.Enum(f"OptionsFor_{name}", options)
49
+ return Parameter.basic(name, e[default or options[0]], e)
50
+
51
+ @staticmethod
52
+ def collapsed(name, default, type=None):
53
+ return Parameter.basic(name, default, CollapsedStr)
54
+
55
+ @staticmethod
56
+ def basic(name, default=None, type=None):
57
+ if default is inspect._empty:
58
+ default = None
59
+ if type is None or type is inspect._empty:
60
+ type = typeof(default) if default is not None else None
61
+ return Parameter(name=name, default=default, type=type)
62
+
63
 
64
  class Input(BaseConfig):
65
+ name: str
66
+ type: Type
67
+ position: str = "left"
68
+
69
 
70
  class Output(BaseConfig):
71
+ name: str
72
+ type: Type
73
+ position: str = "right"
74
+
75
+
76
+ MULTI_INPUT = Input(name="multi", type="*")
77
+
78
 
 
79
  def basic_inputs(*names):
80
+ return {name: Input(name=name, type=None) for name in names}
81
+
82
+
83
  def basic_outputs(*names):
84
+ return {name: Output(name=name, type=None) for name in names}
85
 
86
 
87
  class Op(BaseConfig):
88
+ func: typing.Callable = pydantic.Field(exclude=True)
89
+ name: str
90
+ params: dict[str, Parameter]
91
+ inputs: dict[str, Input]
92
+ outputs: dict[str, Output]
93
+ type: str = "basic" # The UI to use for this operation.
94
+
95
+ def __call__(self, *inputs, **params):
96
+ # Convert parameters.
97
+ for p in params:
98
+ if p in self.params:
99
+ if self.params[p].type == int:
100
+ params[p] = int(params[p])
101
+ elif self.params[p].type == float:
102
+ params[p] = float(params[p])
103
+ elif isinstance(self.params[p].type, enum.EnumMeta):
104
+ params[p] = self.params[p].type[params[p]]
105
+ res = self.func(*inputs, **params)
106
+ return res
107
+
108
+
109
+ def op(env: str, name: str, *, view="basic", outputs=None):
110
+ """Decorator for defining an operation."""
111
+
112
+ def decorator(func):
113
+ sig = inspect.signature(func)
114
+ # Positional arguments are inputs.
115
+ inputs = {
116
+ name: Input(name=name, type=param.annotation)
117
+ for name, param in sig.parameters.items()
118
+ if param.kind != param.KEYWORD_ONLY
119
+ }
120
+ params = {}
121
+ for n, param in sig.parameters.items():
122
+ if param.kind == param.KEYWORD_ONLY and not n.startswith("_"):
123
+ params[n] = Parameter.basic(n, param.default, param.annotation)
124
+ if outputs:
125
+ _outputs = {name: Output(name=name, type=None) for name in outputs}
126
+ else:
127
+ _outputs = (
128
+ {"output": Output(name="output", type=None)} if view == "basic" else {}
129
+ )
130
+ op = Op(
131
+ func=func,
132
+ name=name,
133
+ params=params,
134
+ inputs=inputs,
135
+ outputs=_outputs,
136
+ type=view,
137
+ )
138
+ CATALOGS.setdefault(env, {})
139
+ CATALOGS[env][name] = op
140
+ func.__op__ = op
141
+ return func
142
+
143
+ return decorator
144
+
145
 
146
  def input_position(**kwargs):
147
+ """Decorator for specifying unusual positions for the inputs."""
148
+
149
+ def decorator(func):
150
+ op = func.__op__
151
+ for k, v in kwargs.items():
152
+ op.inputs[k].position = v
153
+ return func
154
+
155
+ return decorator
156
+
157
 
158
  def output_position(**kwargs):
159
+ """Decorator for specifying unusual positions for the outputs."""
160
+
161
+ def decorator(func):
162
+ op = func.__op__
163
+ for k, v in kwargs.items():
164
+ op.outputs[k].position = v
165
+ return func
166
+
167
+ return decorator
168
+
169
 
170
  def no_op(*args, **kwargs):
171
+ if args:
172
+ return args[0]
173
+ return None
174
+
175
+
176
+ def register_passive_op(env: str, name: str, inputs=[], outputs=["output"], params=[]):
177
+ """A passive operation has no associated code."""
178
+ op = Op(
179
+ func=no_op,
180
+ name=name,
181
+ params={p.name: p for p in params},
182
+ inputs=dict(
183
+ (i, Input(name=i, type=None)) if isinstance(i, str) else (i.name, i)
184
+ for i in inputs
185
+ ),
186
+ outputs=dict(
187
+ (o, Output(name=o, type=None)) if isinstance(o, str) else (o.name, o)
188
+ for o in outputs
189
+ ),
190
+ )
191
+ CATALOGS.setdefault(env, {})
192
+ CATALOGS[env][name] = op
193
+ return op
194
+
195
 
196
  def register_executor(env: str):
197
+ """Decorator for registering an executor."""
198
+
199
+ def decorator(func):
200
+ EXECUTORS[env] = func
201
+ return func
202
+
203
+ return decorator
204
+
205
 
206
  def op_registration(env: str):
207
+ return functools.partial(op, env)
208
+
209
 
210
  def passive_op_registration(env: str):
211
+ return functools.partial(register_passive_op, env)
212
+
213
 
214
  def register_area(env, name, params=[]):
215
+ """A node that represents an area. It can contain other nodes, but does not restrict movement in any way."""
216
+ op = Op(
217
+ func=no_op,
218
+ name=name,
219
+ params={p.name: p for p in params},
220
+ inputs={},
221
+ outputs={},
222
+ type="area",
223
+ )
224
+ CATALOGS[env][name] = op
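For orientation, a minimal usage sketch of the decorator API kept (and reformatted) above, patterned on server/lynxkite_ops.py; the "Demo" environment name and the add_one operation are made up for illustration and assume the code lives inside the server package:

    from . import ops

    op = ops.op_registration("Demo")  # shorthand for functools.partial(ops.op, "Demo")

    @op("Add one")
    def add_one(x: int, *, amount: int = 1):
        # Positional arguments become inputs; keyword-only arguments become parameters.
        return x + amount

    @ops.register_executor("Demo")
    async def execute(ws):
        # One executor per environment; with sub_nodes/sub_flow removed there is
        # no special handling for child nodes any more.
        ...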
server/workspace.py CHANGED
@@ -1,4 +1,5 @@
1
- '''For working with LynxKite workspaces.'''
 
2
  from typing import Optional
3
  import dataclasses
4
  import os
@@ -6,15 +7,18 @@ import pydantic
6
  import tempfile
7
  from . import ops
8
 
 
9
  class BaseConfig(pydantic.BaseModel):
10
  model_config = pydantic.ConfigDict(
11
- extra='allow',
12
  )
13
 
 
14
  class Position(BaseConfig):
15
  x: float
16
  y: float
17
 
 
18
  class WorkspaceNodeData(BaseConfig):
19
  title: str
20
  params: dict
@@ -23,12 +27,13 @@ class WorkspaceNodeData(BaseConfig):
23
  # Also contains a "meta" field when going out.
24
  # This is ignored when coming back from the frontend.
25
 
 
26
  class WorkspaceNode(BaseConfig):
27
  id: str
28
  type: str
29
  data: WorkspaceNodeData
30
  position: Position
31
- parentId: Optional[str] = None
32
 
33
  class WorkspaceEdge(BaseConfig):
34
  id: str
@@ -37,8 +42,9 @@ class WorkspaceEdge(BaseConfig):
37
  sourceHandle: str
38
  targetHandle: str
39
 
 
40
  class Workspace(BaseConfig):
41
- env: str = ''
42
  nodes: list[WorkspaceNode] = dataclasses.field(default_factory=list)
43
  edges: list[WorkspaceEdge] = dataclasses.field(default_factory=list)
44
 
@@ -52,7 +58,9 @@ def save(ws: Workspace, path: str):
52
  j = ws.model_dump_json(indent=2)
53
  dirname, basename = os.path.split(path)
54
  # Create temp file in the same directory to make sure it's on the same filesystem.
55
- with tempfile.NamedTemporaryFile('w', prefix=f'.{basename}.', dir=dirname, delete_on_close=False) as f:
 
 
56
  f.write(j)
57
  f.close()
58
  os.replace(f.name, path)
@@ -76,22 +84,13 @@ def _update_metadata(ws):
76
  if node.id in done:
77
  continue
78
  data = node.data
79
- if node.parentId is None:
80
- op = catalog.get(data.title)
81
- elif node.parentId not in nodes:
82
- data.error = f'Parent not found: {node.parentId}'
83
- done.add(node.id)
84
- continue
85
- elif node.parentId in done:
86
- op = nodes[node.parentId].data.meta.sub_nodes[data.title]
87
- else:
88
- continue
89
  if op:
90
  data.meta = op
91
  node.type = op.type
92
- if data.error == 'Unknown operation.':
93
  data.error = None
94
  else:
95
- data.error = 'Unknown operation.'
96
  done.add(node.id)
97
  return ws
 
1
+ """For working with LynxKite workspaces."""
2
+
3
  from typing import Optional
4
  import dataclasses
5
  import os
 
7
  import tempfile
8
  from . import ops
9
 
10
+
11
  class BaseConfig(pydantic.BaseModel):
12
  model_config = pydantic.ConfigDict(
13
+ extra="allow",
14
  )
15
 
16
+
17
  class Position(BaseConfig):
18
  x: float
19
  y: float
20
 
21
+
22
  class WorkspaceNodeData(BaseConfig):
23
  title: str
24
  params: dict
 
27
  # Also contains a "meta" field when going out.
28
  # This is ignored when coming back from the frontend.
29
 
30
+
31
  class WorkspaceNode(BaseConfig):
32
  id: str
33
  type: str
34
  data: WorkspaceNodeData
35
  position: Position
36
+
37
 
38
  class WorkspaceEdge(BaseConfig):
39
  id: str
 
42
  sourceHandle: str
43
  targetHandle: str
44
 
45
+
46
  class Workspace(BaseConfig):
47
+ env: str = ""
48
  nodes: list[WorkspaceNode] = dataclasses.field(default_factory=list)
49
  edges: list[WorkspaceEdge] = dataclasses.field(default_factory=list)
50
 
 
58
  j = ws.model_dump_json(indent=2)
59
  dirname, basename = os.path.split(path)
60
  # Create temp file in the same directory to make sure it's on the same filesystem.
61
+ with tempfile.NamedTemporaryFile(
62
+ "w", prefix=f".{basename}.", dir=dirname, delete_on_close=False
63
+ ) as f:
64
  f.write(j)
65
  f.close()
66
  os.replace(f.name, path)
 
84
  if node.id in done:
85
  continue
86
  data = node.data
87
+ op = catalog.get(data.title)
88
  if op:
89
  data.meta = op
90
  node.type = op.type
91
+ if data.error == "Unknown operation.":
92
  data.error = None
93
  else:
94
+ data.error = "Unknown operation."
95
  done.add(node.id)
96
  return ws
web/src/apiTypes.ts CHANGED
@@ -24,7 +24,6 @@ export interface WorkspaceNode {
24
  type: string;
25
  data: WorkspaceNodeData;
26
  position: Position;
27
- parentId?: string | null;
28
  [k: string]: unknown;
29
  }
30
  export interface WorkspaceNodeData {
 
24
  type: string;
25
  data: WorkspaceNodeData;
26
  position: Position;
 
27
  [k: string]: unknown;
28
  }
29
  export interface WorkspaceNodeData {
web/src/index.css CHANGED
@@ -15,7 +15,8 @@
15
  background: #002a4c;
16
  }
17
 
18
- img, svg {
 
19
  display: inline-block;
20
  }
21
 
@@ -156,6 +157,43 @@ body {
156
  line-height: 10px;
157
  }
158
  }
159
  }
160
 
161
  .directory {
@@ -265,29 +303,29 @@ path.react-flow__edge-path {
265
  stroke-width: 2;
266
  stroke: black;
267
  }
 
268
  .react-flow__edge.selected path.react-flow__edge-path {
269
  outline: var(--xy-selection-border, var(--xy-selection-border-default));
270
  outline-offset: 10px;
271
  border-radius: 1px;
272
  }
 
273
  .react-flow__handle {
274
  border-color: black;
275
  background: white;
276
  width: 10px;
277
  height: 10px;
278
  }
 
279
  .react-flow__arrowhead * {
280
  stroke: none;
281
  fill: black;
282
  }
283
- // We want the area node to be above the sub-flow node if its inside the sub-flow.
284
- // This will need some more thinking for a general solution.
285
- .react-flow__node-sub_flow {
286
- z-index: -20 !important;
287
- }
288
  .react-flow__node-area {
289
  z-index: -10 !important;
290
  }
 
291
  .selected .lynxkite-node {
292
  outline: var(--xy-selection-border, var(--xy-selection-border-default));
293
  outline-offset: 7.5px;
 
15
  background: #002a4c;
16
  }
17
 
18
+ img,
19
+ svg {
20
  display: inline-block;
21
  }
22
 
 
157
  line-height: 10px;
158
  }
159
  }
160
+
161
+ .node-search {
162
+ position: fixed;
163
+ width: 300px;
164
+ z-index: 5;
165
+ padding: 4px;
166
+ border-radius: 4px;
167
+ border: 1px solid #888;
168
+ background-color: white;
169
+ max-height: -webkit-fill-available;
170
+ max-height: -moz-available;
171
+ display: flex;
172
+ flex-direction: column;
173
+
174
+ input {
175
+ width: calc(100% - 26px);
176
+ font-size: 20px;
177
+ padding: 8px;
178
+ border-radius: 4px;
179
+ border: 1px solid #eee;
180
+ margin: 4px;
181
+ }
182
+
183
+ .search-result {
184
+ padding: 4px;
185
+ cursor: pointer;
186
+ }
187
+
188
+ .search-result.selected {
189
+ background-color: oklch(75% 0.2 55);
190
+ border-radius: 4px;
191
+ }
192
+
193
+ .matches {
194
+ overflow-y: auto;
195
+ }
196
+ }
197
  }
198
 
199
  .directory {
 
303
  stroke-width: 2;
304
  stroke: black;
305
  }
306
+
307
  .react-flow__edge.selected path.react-flow__edge-path {
308
  outline: var(--xy-selection-border, var(--xy-selection-border-default));
309
  outline-offset: 10px;
310
  border-radius: 1px;
311
  }
312
+
313
  .react-flow__handle {
314
  border-color: black;
315
  background: white;
316
  width: 10px;
317
  height: 10px;
318
  }
319
+
320
  .react-flow__arrowhead * {
321
  stroke: none;
322
  fill: black;
323
  }
324
+
325
  .react-flow__node-area {
326
  z-index: -10 !important;
327
  }
328
+
329
  .selected .lynxkite-node {
330
  outline: var(--xy-selection-border, var(--xy-selection-border-default));
331
  outline-offset: 7.5px;