Merge branch 'main' into darabos-model-designer
Dockerfile
CHANGED
@@ -5,12 +5,16 @@ USER node
 ENV HOME=/home/node PATH=/home/node/.local/bin:$PATH
 WORKDIR $HOME/app
 COPY --chown=node . $HOME/app
-RUN uv venv && uv pip install \
+ENV GIT_SSH_COMMAND="ssh -i /run/secrets/LYNXSCRIBE_DEPLOY_KEY -o StrictHostKeyChecking=no"
+RUN --mount=type=secret,id=LYNXSCRIBE_DEPLOY_KEY,mode=0444,required=true \
+  uv venv && uv pip install \
   -e lynxkite-core \
   -e lynxkite-app \
   -e lynxkite-graph-analytics \
   -e lynxkite-bio \
-  -e lynxkite-
+  -e lynxkite-lynxscribe \
+  -e lynxkite-pillow-example \
+  chromadb openai
 WORKDIR $HOME/app/examples
 ENV PORT=7860
 CMD ["uv", "run", "lynxkite"]
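Note on the Dockerfile change above: besides adding the lynxkite-lynxscribe and lynxkite-pillow-example packages (plus chromadb and openai), the install step now runs with a deploy key mounted as a BuildKit secret (--mount=type=secret,...) and GIT_SSH_COMMAND pointing git at that key with host-key checking disabled, presumably because one of the new packages pulls a dependency from a private Git repository over SSH. Since the secret is only mounted for that single RUN instruction, the key does not end up in any image layer. As a hedged usage note: a local build would need BuildKit and a command along the lines of docker build --secret id=LYNXSCRIBE_DEPLOY_KEY,src=<path-to-deploy-key> . (the path is whatever file holds the deploy key), while on a Hugging Face Space the secret is expected to be supplied under the same name in the Space settings.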
lynxkite-graph-analytics/src/lynxkite_graph_analytics/lynxkite_ops.py
CHANGED
@@ -15,7 +15,7 @@ import polars as pl
 import json
 
 
-mem = joblib.Memory("joblib-cache")
+mem = joblib.Memory(".joblib-cache")
 op = ops.op_registration(core.ENV)
 
 
@@ -87,8 +87,8 @@ def import_parquet(*, filename: str):
     return pd.read_parquet(filename)
 
 
-@mem.cache
 @op("Import CSV")
+@mem.cache
 def import_csv(
     *, filename: str, columns: str = "<from file>", separator: str = "<auto>"
 ):
@@ -102,8 +102,8 @@ def import_csv(
     )
 
 
-@mem.cache
 @op("Import GraphML")
+@mem.cache
 def import_graphml(*, filename: str):
     """Imports a GraphML file."""
     files = fsspec.open_files(filename, compression="infer")
@@ -114,8 +114,8 @@ def import_graphml(*, filename: str):
     raise ValueError(f"No .graphml file found at {filename}")
 
 
-@mem.cache
 @op("Graph from OSM")
+@mem.cache
 def import_osm(*, location: str):
     import osmnx as ox
 
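In lynxkite_ops.py the joblib cache directory becomes a dot-prefixed path (.joblib-cache), presumably just to keep it hidden in directory listings, and @mem.cache moves below the @op(...) decorator on the three importers. Decorators apply bottom-up, so with the new order joblib wraps the raw function first and @op registers the cached wrapper; assuming ops.op_registration behaves like a typical registry decorator that stores the callable it is handed, this is what makes calls routed through the op registry actually hit the on-disk cache. A minimal sketch of that ordering, using a toy registry rather than the real LynxKite one:

# Minimal sketch, not LynxKite code: a toy registry standing in for @op,
# showing why @mem.cache should sit closest to the function.
import joblib

mem = joblib.Memory(".joblib-cache")  # dot-prefixed, so the cache dir stays hidden
REGISTRY = {}

def op(name):
    def decorator(func):
        REGISTRY[name] = func  # the registry keeps whatever callable it is handed
        return func
    return decorator

@op("Import CSV")   # applied second: registers the cached wrapper
@mem.cache          # applied first: wraps the raw function with disk caching
def import_csv(*, filename: str):
    print("actually reading", filename)
    return filename.upper()

REGISTRY["Import CSV"](filename="x.csv")  # computes and stores the result
REGISTRY["Import CSV"](filename="x.csv")  # served from .joblib-cache; the body does not run again

With the old decorator order the registry would have captured the uncached function (and @mem.cache would only wrap the object returned by @op), so repeated imports through the registry would re-read the file every time.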