Commit 6054fe9

Merge remote-tracking branch 'origin/release-v4.6' into develop
2 parents: df693d5 + 1e89570

File tree: 9 files changed, +241 additions, −23 deletions


.github/workflows/python-package.yml

Lines changed: 2 additions & 2 deletions

@@ -77,7 +77,7 @@ jobs:
       fail-fast: true
       matrix:
         os: ["ubuntu-latest", "macos-latest", "windows-latest"]
-        python-version: ["3.7", "3.12"]
+        python-version: ["3.7", "3.13"]
         exclude:
           # Latest macos runner does not support older Python versions
           # https://github.com/actions/setup-python/issues/852
@@ -125,7 +125,7 @@ jobs:
         uses: docker/setup-qemu-action@v3
         with:
           platforms: arm64
-      - uses: pypa/cibuildwheel@v2.20.0
+      - uses: pypa/cibuildwheel@v2.21.3
        env:
          CIBW_SKIP: cp36-* pp*-win* pp*-macosx* *_i686
          CIBW_TEST_SKIP: "*-win_arm64"

CHANGELOG.md

Lines changed: 6 additions & 0 deletions

@@ -1,5 +1,11 @@
 # Changelog
 
+## v4.6.1
+
+- Added: Python 3.13 support
+- Fix: Disallow non-URL-encodable characters when creating external job inputs and outputs
+- Fix: Prevent queuing and killing external jobs (must use `job.start()`/`job.stop()` or `with job.run()`)
+
 ## v4.6.0
 
 - Added: `Dataset.is_equivalent` method to check if two datasets have identical fields, but in a different order
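
The two external-job fixes above change how scripts drive an external job's lifecycle. A minimal sketch of the supported pattern, assuming an existing CryoSPARC client `cs`; the project, workspace, and output names below are illustrative:

    project = cs.find_project("P1")
    job = project.create_external_job("W1", title="Recenter Particles")
    job.add_output("particle", name="picked_particles", slots=["blob", "ctf"])

    # Supported lifecycle: explicit job.start()/job.stop(), or the run() context manager
    with job.run():
        ...  # compute results and call job.save_output("picked_particles", dataset) here

    # job.queue() and job.kill() are no longer allowed on external jobs and raise
    # ExternalJobError (see cryosparc/errors.py and cryosparc/job.py below).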

cryosparc/__init__.py

Lines changed: 1 addition & 1 deletion

@@ -1,4 +1,4 @@
-__version__ = "4.6.0"
+__version__ = "4.6.1"
 
 
 def get_include():

cryosparc/errors.py

Lines changed: 8 additions & 0 deletions

@@ -78,3 +78,11 @@ def __init__(self, caller: str, validation: SlotsValidation):
         )
 
         return super().__init__(msg)
+
+
+class ExternalJobError(Exception):
+    """
+    Raised during external job lifecycle failures
+    """
+
+    pass
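
Callers that previously queued or killed external jobs can catch the new exception explicitly. A small illustrative sketch; `external_job` is assumed to be an ExternalJob instance created as in the tests further below:

    from cryosparc.errors import ExternalJobError

    try:
        external_job.queue()  # disallowed for external jobs as of v4.6.1
    except ExternalJobError as err:
        print(f"Cannot queue: {err}")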

cryosparc/job.py

Lines changed: 35 additions & 4 deletions

@@ -4,6 +4,8 @@
 
 import json
 import math
+import re
+import urllib.parse
 from contextlib import contextmanager
 from io import BytesIO
 from pathlib import PurePath, PurePosixPath
@@ -15,7 +17,7 @@
 
 from .command import CommandError, make_json_request, make_request
 from .dataset import DEFAULT_FORMAT, Dataset
-from .errors import InvalidSlotsError
+from .errors import ExternalJobError, InvalidSlotsError
 from .spec import (
     ASSET_CONTENT_TYPES,
     IMAGE_CONTENT_TYPES,
@@ -39,6 +41,12 @@
     from .tools import CryoSPARC
 
 
+GROUP_NAME_PATTERN = r"^[A-Za-z][0-9A-Za-z_]*$"
+"""
+Input and output result groups may only contain letters, numbers and underscores.
+"""
+
+
 class Job(MongoController[JobDocument]):
     """
     Accessor class to a job in CryoSPARC with ability to load inputs and
@@ -1233,6 +1241,11 @@ def add_input(
         ... )
         "input_micrographs"
         """
+        if name and not re.fullmatch(GROUP_NAME_PATTERN, name):
+            raise ValueError(
+                f'Invalid input name "{name}"; may only contain letters, numbers and underscores, '
+                "and must start with a letter"
+            )
         try:
             self.cs.vis.add_external_job_input(  # type: ignore
                 project_uid=self.project_uid,
@@ -1354,6 +1367,11 @@ def add_output(
         ... )
         "particle_alignments"
         """
+        if name and not re.fullmatch(GROUP_NAME_PATTERN, name):
+            raise ValueError(
+                f'Invalid output name "{name}"; may only contain letters, numbers and underscores, '
+                "and must start with a letter"
+            )
         try:
             self.cs.vis.add_external_job_output(  # type: ignore
                 project_uid=self.project_uid,
@@ -1519,7 +1537,8 @@ def save_output(self, name: str, dataset: Dataset, *, refresh: bool = True):
         >>> job.save_output("picked_particles", particles)
 
         """
-        url = f"/external/projects/{self.project_uid}/jobs/{self.uid}/outputs/{name}/dataset"
+
+        url = f"/external/projects/{self.project_uid}/jobs/{self.uid}/outputs/{urllib.parse.quote_plus(name)}/dataset"
         with make_request(self.cs.vis, url=url, data=dataset.stream(compression="lz4")) as res:
             result = res.read().decode()
             assert res.status >= 200 and res.status < 400, f"Save output failed with message: {result}"
@@ -1572,12 +1591,24 @@ def run(self):
         """
         error = False
         self.start("running")
-        self.refresh()
         try:
             yield self
         except Exception:
             error = True
             raise
         finally:
             self.stop(error)  # TODO: Write Error to job log, if possible
-            self.refresh()
+
+    def queue(
+        self,
+        lane: Optional[str] = None,
+        hostname: Optional[str] = None,
+        gpus: List[int] = [],
+        cluster_vars: Dict[str, Any] = {},
+    ):
+        raise ExternalJobError(
+            "Cannot queue an external job; use `job.start()`/`job.stop()` or `with job.run()` instead"
+        )
+
+    def kill(self):
+        raise ExternalJobError("Cannot kill an external job; use `job.stop()` instead")
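
The name validation and URL-encoding added above can be exercised in isolation. A quick sketch of what GROUP_NAME_PATTERN accepts (the sample names are illustrative):

    import re
    import urllib.parse

    GROUP_NAME_PATTERN = r"^[A-Za-z][0-9A-Za-z_]*$"

    print(bool(re.fullmatch(GROUP_NAME_PATTERN, "picked_particles")))  # True
    print(bool(re.fullmatch(GROUP_NAME_PATTERN, "particles/1")))       # False: "/" is not allowed
    print(bool(re.fullmatch(GROUP_NAME_PATTERN, "1particles")))        # False: must start with a letter

    # save_output additionally URL-encodes the output name when building its request path:
    print(urllib.parse.quote_plus("picked particles"))  # prints picked+particles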

pyproject.toml

Lines changed: 1 addition & 1 deletion

@@ -1,6 +1,6 @@
 [project]
 name = "cryosparc-tools"
-version = "4.6.0"
+version = "4.6.1"
 description = "Toolkit for interfacing with CryoSPARC"
 readme = "README.md"
 requires-python = ">=3.7"

setup.py

Lines changed: 1 addition & 1 deletion

@@ -25,7 +25,7 @@
 
 setup(
     name="cryosparc_tools",
-    version="4.6.0",
+    version="4.6.1",
     description="Toolkit for interfacing with CryoSPARC",
     headers=["cryosparc/include/cryosparc-tools/dataset.h"],
     ext_modules=cythonize(

tests/conftest.py

Lines changed: 36 additions & 13 deletions

@@ -98,9 +98,13 @@ def shuffle(self):
 # fmt: on
 
 
-def request_callback_core(request, uri, response_headers):
-    body = json.loads(request.body)
-    procs = {
+@pytest.fixture
+def mock_jsonrpc_procs_core() -> Dict[str, Any]:
+    """
+    Dictionary of JSON RPC method names and their return values. Can override
+    existing values in subfixtures.
+    """
+    return {
         "hello_world": {"hello": "world"},
         "get_running_version": "develop",
         "get_id_by_email_password": "6372a35e821ed2b71d9fe4e3",
@@ -253,17 +257,36 @@ def request_callback_core(request, uri, response_headers):
         "job_connect_group": True,
         "job_set_param": True,
     }
-    procs["system.describe"] = {"procs": [{"name": m} for m in procs]}
-    response_headers["content-type"] = "application/json"
-    return [200, response_headers, json.dumps({"result": procs[body["method"]]})]
 
 
-def request_callback_vis(request, uri, response_headers):
-    body = json.loads(request.body)
-    procs: Dict[str, Any] = {"hello_world": {"hello": "world"}}
-    procs["system.describe"] = {"procs": [{"name": m} for m in procs]}
-    response_headers["content-type"] = "application/json"
-    return [200, response_headers, json.dumps({"result": procs[body["method"]]})]
+@pytest.fixture
+def request_callback_core(mock_jsonrpc_procs_core):
+    def request_callback_core_fn(request, uri, response_headers):
+        body = json.loads(request.body)
+        mock_jsonrpc_procs_core["system.describe"] = {"procs": [{"name": m} for m in mock_jsonrpc_procs_core]}
+        response_headers["content-type"] = "application/json"
+        return [200, response_headers, json.dumps({"result": mock_jsonrpc_procs_core[body["method"]]})]
+
+    return request_callback_core_fn
+
+
+@pytest.fixture
+def mock_jsonrpc_procs_vis() -> Dict[str, Any]:
+    return {
+        "hello_world": {"hello": "world"},
+    }
+
+
+@pytest.fixture
+def request_callback_vis(mock_jsonrpc_procs_vis):
+    def request_callback_vis_fn(request, uri, response_headers):
+        body = json.loads(request.body)
+
+        mock_jsonrpc_procs_vis["system.describe"] = {"procs": [{"name": m} for m in mock_jsonrpc_procs_vis]}
+        response_headers["content-type"] = "application/json"
+        return [200, response_headers, json.dumps({"result": mock_jsonrpc_procs_vis[body["method"]]})]
+
+    return request_callback_vis_fn
 
 
 def request_callback_vis_get_project_file(request, uri, response_headers):
@@ -404,7 +427,7 @@ def t20s_particles_passthrough():
 
 
 @pytest.fixture
-def cs():
+def cs(request_callback_core, request_callback_vis):
     httpretty.enable(verbose=False, allow_net_connect=False)
     httpretty.register_uri(httpretty.POST, "http://localhost:39002/api", body=request_callback_core)  # type: ignore
     httpretty.register_uri(httpretty.POST, "http://localhost:39003/api", body=request_callback_vis)  # type: ignore

tests/test_job.py

Lines changed: 151 additions & 1 deletion

@@ -4,15 +4,60 @@
 import pytest
 
 from cryosparc.dataset import Dataset
-from cryosparc.job import Job
+from cryosparc.job import ExternalJob, Job
 from cryosparc.project import Project
+from cryosparc.tools import CryoSPARC
+
+from .conftest import T20S_PARTICLES
 
 
 @pytest.fixture
 def job(cs, project: Project):
     return project.find_job("J1")
 
 
+@pytest.fixture
+def mock_external_job_doc():
+    return {
+        "_id": "67292e95282b26b45d0e8fee",
+        "uid": "J2",
+        "uid_num": 2,
+        "project_uid": "P1",
+        "project_uid_num": 1,
+        "type": "snowflake",
+        "job_type": "snowflake",
+        "title": "Recenter Particles",
+        "description": "Enter a description.",
+        "status": "building",
+        "created_at": "Mon, 04 Nov 2024 20:29:09 GMT",
+        "created_by_user_id": "61f0383552d791f286b796ef",
+        "parents": [],
+        "children": [],
+        "input_slot_groups": [],
+        "output_result_groups": [],
+        "output_results": [],
+        "params_base": {},
+        "params_spec": {},
+        "params_secs": {},
+        "workspace_uids": ["W1"],
+    }
+
+
+@pytest.fixture
+def external_job(
+    mock_jsonrpc_procs_vis,
+    mock_jsonrpc_procs_core,
+    mock_external_job_doc,
+    cs: CryoSPARC,
+    project: Project,
+):
+    mock_jsonrpc_procs_vis["create_external_job"] = "J2"
+    mock_jsonrpc_procs_core["get_job"] = mock_external_job_doc
+    cs.cli()
+    cs.vis()
+    return project.create_external_job("W1", title="Recenter Particles")
+
+
 def test_queue(job: Job):
     job.queue()
     queue_request = httpretty.latest_requests()[-3]
@@ -104,3 +149,108 @@ def test_job_subprocess_io(job: Job):
     opt1 = {"project_uid": "P1", "job_uid": "J1", "message": "error", "error": False}
     opt2 = {"project_uid": "P1", "job_uid": "J1", "message": "world", "error": False}
     assert params == opt1 or params == opt2
+
+
+def test_create_external_job(cs: CryoSPARC, external_job: ExternalJob):
+    requests = httpretty.latest_requests()
+    create_external_job_request = requests[-3]
+    create_external_job_body = create_external_job_request.parsed_body
+    find_external_job_request = requests[-1]
+    find_external_job_body = find_external_job_request.parsed_body
+
+    assert create_external_job_body["method"] == "create_external_job"
+    assert create_external_job_body["params"] == {
+        "project_uid": "P1",
+        "workspace_uid": "W1",
+        "user": cs.user_id,
+        "title": "Recenter Particles",
+        "desc": None,
+    }
+    assert find_external_job_body["method"] == "get_job"
+    assert find_external_job_body["params"] == ["P1", "J2"]
+
+
+@pytest.fixture
+def external_job_output(mock_jsonrpc_procs_vis, mock_external_job_doc, cs: CryoSPARC, external_job: ExternalJob):
+    mock_external_job_doc["output_result_groups"] = [
+        {
+            "uid": "J2-G1",
+            "type": "particle",
+            "name": "particles",
+            "title": "Particles",
+            "description": "",
+            "contains": [
+                {
+                    "uid": "J2-R1",
+                    "type": "particle.blob",
+                    "group_name": "particles",
+                    "name": "blob",
+                    "passthrough": False,
+                },
+                {
+                    "uid": "J2-R2",
+                    "type": "particle.ctf",
+                    "group_name": "particles",
+                    "name": "ctf",
+                    "passthrough": False,
+                },
+            ],
+            "passthrough": False,
+        }
+    ]
+    mock_external_job_doc["output_results"] = [
+        {
+            "uid": "J2-R1",
+            "type": "particle.blob",
+            "group_name": "particles",
+            "name": "blob",
+            "title": "",
+            "description": "",
+            "min_fields": [["path", "O"], ["idx", "u4"], ["shape", "2u4"], ["psize_A", "f4"], ["sign", "f4"]],
+            "versions": [0],
+            "metafiles": ["J2/particles.cs"],
+            "num_items": [10],
+            "passthrough": False,
+        },
+        {
+            "uid": "J2-R2",
+            "type": "particle.ctf",
+            "group_name": "particles",
+            "name": "ctf",
+            "title": "",
+            "description": "",
+            "min_fields": [["type", "O"], ["exp_group_id", "u4"], ["accel_kv", "f4"], ["cs_mm", "f4"]],
+            "versions": [0],
+            "metafiles": ["J2/particles.cs"],
+            "num_items": [10],
+            "passthrough": False,
+        },
+    ]
+    mock_jsonrpc_procs_vis["add_external_job_output"] = "particles"
+    httpretty.register_uri(
+        httpretty.POST,
+        "http://localhost:39003/external/projects/P1/jobs/J2/outputs/particles/dataset",
+        body='"particles"',
+    )
+
+    cs.vis()
+    external_job.add_output("particle", name="particles", slots=["blob", "ctf"])
+    external_job.save_output("particles", T20S_PARTICLES)
+    return T20S_PARTICLES
+
+
+def test_external_job_output(external_job_output):
+    requests = httpretty.latest_requests()
+    create_output_request = requests[-3]
+    find_external_job_request = requests[-1]
+    find_external_job_body = find_external_job_request.parsed_body
+
+    assert len(external_job_output) > 0
+    assert create_output_request.url == "http://localhost:39003/external/projects/P1/jobs/J2/outputs/particles/dataset"
+    assert find_external_job_body["method"] == "get_job"
+    assert find_external_job_body["params"] == ["P1", "J2"]
+
+
+def test_invalid_external_job_output(external_job):
+    with pytest.raises(ValueError, match="Invalid output name"):
+        external_job.add_output("particle", name="particles/1", slots=["blob", "ctf"])
