Coverage for crunch/client/main.py: 100.00%
108 statements
coverage.py v6.5.0, created at 2023-10-01 13:43 +0000
from pathlib import Path
import typer
from rich.console import Console

console = Console()

from crunch.django.app import storages
from . import connections
from .diagnostics import get_diagnostics
from .enums import WorkflowType
from .run import Run


class NoDatasets(Exception):
    """Raised when there are no more datasets to process on a crunch site."""
    pass


app = typer.Typer()
url_arg = typer.Option(
    ...,
    envvar="CRUNCH_URL",
    help="The URL for the endpoint for the project on the crunch hosted site.",
    prompt=True,
)
token_arg = typer.Option(
    ...,
    envvar="CRUNCH_TOKEN",
    help="An access token for a user on the hosted site.",
    prompt=True,
)
dataset_slug_arg = typer.Argument(..., help="The slug for the dataset.")
item_slug_arg = typer.Argument(..., help="The slug for the item.")
storage_settings_arg = typer.Option(
    ...,
    envvar="CRUNCH_STORAGE_SETTINGS",
    help="The path to a JSON or TOML file with the Django settings for the storage.",
    prompt=True,
)
key_arg = typer.Argument(..., help="The key for this attribute.")
value_arg = typer.Argument(..., help="The value of this attribute.")
directory_arg = typer.Option(
    "./tmp", help="The path to a directory to store the temporary files."
)
cores_arg = typer.Option(
    "1",
    help="The maximum number of cores/jobs to use to run the workflow. If 'all' then it uses all available cores.",
)
workflow_type_arg = typer.Option("snakemake", help="Workflow type (snakemake/script).")
path_arg = typer.Option(None, help="The path to the workflow file.")
download_arg = typer.Option(True, help="Whether or not to download the data from storage in the setup of a run.")
upload_arg = typer.Option(True, help="Whether or not to upload the data to storage after a run.")
cleanup_arg = typer.Option(False, help="Whether or not to delete the local data after a run.")
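# Example environment setup (a sketch; the variable names mirror the `envvar`
# settings above, and the values shown are hypothetical):
#   export CRUNCH_URL="https://crunch.example.org"
#   export CRUNCH_TOKEN="<access token>"
#   export CRUNCH_STORAGE_SETTINGS="./storage.json"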
@app.command()
def run(
    dataset: str = dataset_slug_arg,
    storage_settings: Path = storage_settings_arg,
    directory: Path = directory_arg,
    cores: str = cores_arg,
    url: str = url_arg,
    token: str = token_arg,
    workflow: WorkflowType = workflow_type_arg,
    path: Path = path_arg,
    download: bool = download_arg,
    upload: bool = upload_arg,
    cleanup: bool = cleanup_arg,
):
    """
    Processes a dataset.
    """
    r = Run(
        connection=connections.Connection(url, token),
        dataset_slug=dataset,
        working_directory=Path(directory),
        workflow_type=workflow,
        storage_settings=storage_settings,
        workflow_path=path,
        cores=cores,
        download_from_storage=download,
        upload_to_storage=upload,
        cleanup=cleanup,
    )

    r()
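# Example invocation (a sketch; assumes the console script is installed as `crunch`,
# the environment variables above are set, and `my-dataset` is a hypothetical slug):
#   crunch run my-dataset --cores all --workflow snakemake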
@app.command()
def next(
    storage_settings: Path = storage_settings_arg,
    directory: Path = directory_arg,
    cores: str = cores_arg,
    url: str = url_arg,
    token: str = token_arg,
    project: str = typer.Option(
        "",
        help="The slug for a project the dataset is in. If not given, then it chooses any project.",
    ),
    workflow: WorkflowType = workflow_type_arg,
    path: Path = path_arg,
    download: bool = download_arg,
    upload: bool = upload_arg,
    cleanup: bool = cleanup_arg,
):
    """
    Processes the next dataset in a project.
    """
    console.print(f"Processing the next dataset from {url}")

    connection = connections.Connection(url, token)

    next_url = f"api/projects/{project}/next/" if project else "api/next/"
    next = connection.get_json_response(next_url)

    if "dataset" in next and "project" in next and next["project"] and next["dataset"]:
        run(
            dataset=next["dataset"],
            storage_settings=storage_settings,
            url=url,
            token=token,
            directory=directory,
            cores=cores,
            workflow=workflow,
            path=path,
            download=download,
            upload=upload,
            cleanup=cleanup,
        )
    else:
        console.print("No more datasets to process.")
        raise NoDatasets
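# Example invocation (a sketch; `my-project` is a hypothetical project slug — omit
# --project to take the next dataset from any project):
#   crunch next --project my-project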
@app.command()
def loop(
    storage_settings: Path = storage_settings_arg,
    directory: Path = directory_arg,
    cores: str = cores_arg,
    url: str = url_arg,
    token: str = token_arg,
    project: str = typer.Option(
        "",
        help="The slug for a project the dataset is in. If not given, then it chooses any project.",
    ),
    workflow: WorkflowType = workflow_type_arg,
    path: Path = path_arg,
    download: bool = download_arg,
    upload: bool = upload_arg,
    cleanup: bool = cleanup_arg,
):
    """
    Loops through all the datasets in a project and stops when complete.
    """
    console.print(
        f"Looping through all the datasets from {url} and will stop when complete."
    )
    while True:
        try:
            next(
                url=url,
                token=token,
                storage_settings=storage_settings,
                directory=directory,
                cores=cores,
                workflow=workflow,
                path=path,
                project=project,
                download=download,
                upload=upload,
                cleanup=cleanup,
            )
        except NoDatasets:
            console.print("Loop concluded.")
            break
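# Example invocation (a sketch; processes datasets until none remain, deleting the
# local working data after each run):
#   crunch loop --project my-project --cleanup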
@app.command()
def add_project(
    project: str = typer.Argument(..., help="The name of the new crunch project."),
    description: str = typer.Option(
        "", help="A brief description of this new project."
    ),
    details: str = typer.Option(
        "", help="A long description of this project in Markdown format."
    ),
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new project on the hosted site.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_project(
        project=project, description=description, details=details
    )
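# Example invocation (a sketch; hypothetical project name and description):
#   crunch add-project "Example Project" --description "A short summary."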
@app.command()
def add_dataset(
    project: str = typer.Argument(
        ..., help="The slug of the project that this dataset is to be added to."
    ),
    dataset: str = typer.Argument(..., help="The name of the new dataset."),
    description: str = typer.Option(
        "", help="A brief description of this new dataset."
    ),
    details: str = typer.Option(
        "", help="A long description of this dataset in Markdown format."
    ),
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new dataset to a project on the hosted site.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_dataset(
        project=project, dataset=dataset, description=description, details=details
    )
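# Example invocation (a sketch; hypothetical project slug and dataset name):
#   crunch add-dataset example-project "Example Dataset" --description "A short summary."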
@app.command()
def add_item(
    parent: str = typer.Argument(
        ..., help="The slug of the item that this item is to be added to."
    ),
    item: str = typer.Argument(..., help="The name of the new item."),
    description: str = typer.Option("", help="A brief description of this new item."),
    details: str = typer.Option(
        "", help="A long description of this item in Markdown format."
    ),
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new item to a parent item on the hosted site.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_item(
        parent=parent, item=item, description=description, details=details
    )
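# Example invocation (a sketch; the parent slug and item name are hypothetical):
#   crunch add-item parent-item-slug "Example Item"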
@app.command()
def add_char_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    value: str = value_arg,
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new char attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_char_attribute(item=item, key=key, value=value)
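# Example invocation (a sketch; hypothetical item slug, key, and value — the other
# add-*-attribute commands below follow the same pattern):
#   crunch add-char-attribute example-item species "Homo sapiens"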
@app.command()
def add_float_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    value: float = value_arg,
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new float attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_float_attribute(item=item, key=key, value=value)


@app.command()
def add_datetime_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    value: str = value_arg,
    format: str = "",
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new datetime attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_datetime_attribute(
        item=item, key=key, value=value, format=format
    )
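# Example invocation (a sketch; hypothetical values — how the --format string is
# interpreted is determined by the connection's add_datetime_attribute method):
#   crunch add-datetime-attribute example-item collected "2023-10-01 13:43" --format "%Y-%m-%d %H:%M"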
@app.command()
def add_date_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    value: str = value_arg,
    format: str = "",
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new date attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_date_attribute(item=item, key=key, value=value, format=format)


@app.command()
def add_lat_long_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    latitude: str = typer.Argument(
        ..., help="The latitude of the coordinate in decimal degrees."
    ),
    longitude: str = typer.Argument(
        ..., help="The longitude of the coordinate in decimal degrees."
    ),
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new lat-long coordinate as an attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_lat_long_attribute(
        item=item, key=key, latitude=latitude, longitude=longitude
    )
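# Example invocation (a sketch; hypothetical item slug and coordinates in decimal degrees):
#   crunch add-lat-long-attribute example-item location 48.8584 2.2945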
@app.command()
def add_integer_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    value: int = value_arg,
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new integer attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_integer_attribute(item=item, key=key, value=value)


@app.command()
def add_filesize_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    value: int = value_arg,
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new filesize attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_filesize_attribute(item=item, key=key, value=value)


@app.command()
def add_boolean_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    value: bool = value_arg,
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new boolean attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_boolean_attribute(item=item, key=key, value=value)


@app.command()
def add_url_attribute(
    item: str = item_slug_arg,
    key: str = key_arg,
    value: str = value_arg,
    verbose: bool = True,
    url: str = url_arg,
    token: str = token_arg,
):
    """
    Adds a new URL attribute to an item.
    """
    connection = connections.Connection(url, token, verbose=verbose)
    return connection.add_url_attribute(item=item, key=key, value=value)


@app.command()
def diagnostics():
    """Display system diagnostics."""
    console.print(get_diagnostics())
@app.command()
def files(
    dataset: str = dataset_slug_arg,
    storage_settings: Path = storage_settings_arg,
    url: str = url_arg,
    token: str = token_arg,
):
    """Displays the files for a dataset."""
    connection = connections.Connection(url, token)
    dataset_data = connection.get_json_response(f"/api/datasets/{dataset}/")
    base_file_path = dataset_data.get("base_file_path")

    storage = storages.get_storage_with_settings(storage_settings)
    listing = storages.storage_walk(base_file_path, storage=storage)
    console.print(listing.render())
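# Example invocation (a sketch; hypothetical dataset slug — the storage settings file
# is needed so the listing can be read from the configured storage):
#   crunch files example-dataset --storage-settings ./storage.json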