-from typing import List, Optional
+from typing import List, Optional, Dict
 
 from cirro_api_client.v1.api.execution import run_analysis, stop_analysis, get_project_summary, \
     get_tasks_for_execution, get_task_logs, get_execution_logs
 from cirro_api_client.v1.api.processes import get_process_parameters
-from cirro_api_client.v1.models import RunAnalysisRequest, CreateResponse, GetProjectSummaryResponse200, Task
+from cirro_api_client.v1.models import RunAnalysisRequest, CreateResponse, Task
 
 from cirro.models.form_specification import ParameterSpecification
 from cirro.services.base import BaseService
@@ -82,7 +82,7 @@ def stop_analysis(self, project_id: str, dataset_id: str):
             client=self._api_client
         )
 
-    def get_project_summary(self, project_id: str) -> GetProjectSummaryResponse200:
+    def get_project_summary(self, project_id: str) -> Dict[str, List[Task]]:
         """
         Gets an overview of the executions currently running in the project, by job queue
 
@@ -98,54 +98,63 @@ def get_project_summary(self, project_id: str) -> GetProjectSummaryResponse200:
             client=self._api_client
         ).additional_properties
 
-    def get_execution_logs(self, project_id: str, dataset_id: str) -> str:
+    def get_execution_logs(self, project_id: str, dataset_id: str, force_live=False) -> str:
         """
         Gets live logs from main execution task
 
         Args:
             project_id (str): ID of the Project
             dataset_id (str): ID of the Dataset
+            force_live (bool): If True, it will fetch logs from CloudWatch,
+                even if the execution is already completed
 
         """
 
         resp = get_execution_logs.sync(
             project_id=project_id,
             dataset_id=dataset_id,
+            force_live=force_live,
             client=self._api_client
         )
 
         return '\n'.join(e.message for e in resp.events)
 
-    def get_tasks_for_execution(self, project_id: str, dataset_id: str) -> Optional[List[Task]]:
+    def get_tasks_for_execution(self, project_id: str, dataset_id: str, force_live=False) -> Optional[List[Task]]:
         """
         Gets the tasks submitted by the workflow execution
 
         Args:
             project_id (str): ID of the Project
             dataset_id (str): ID of the Dataset
+            force_live (bool): If True, it will try to get the list of jobs
+                from the executor (i.e., AWS Batch), rather than the workflow report
         """
 
         return get_tasks_for_execution.sync(
             project_id=project_id,
             dataset_id=dataset_id,
+            force_live=force_live,
             client=self._api_client
         )
 
-    def get_task_logs(self, project_id: str, dataset_id: str, task_id: str) -> str:
+    def get_task_logs(self, project_id: str, dataset_id: str, task_id: str, force_live=False) -> str:
         """
         Gets the log output from an individual task
 
         Args:
             project_id (str): ID of the Project
             dataset_id (str): ID of the Dataset
             task_id (str): ID of the task
+            force_live (bool): If True, it will fetch logs from CloudWatch,
+                even if the execution is already completed
 
         """
 
         resp = get_task_logs.sync(
             project_id=project_id,
             dataset_id=dataset_id,
             task_id=task_id,
+            force_live=force_live,
             client=self._api_client
         )
 
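For reference, a minimal usage sketch of the updated methods. This is not part of the diff: it assumes `execution` is an instance of the service class modified above, and the IDs are hypothetical placeholders.

    # Sketch only: shows the new force_live flag and the Dict[str, List[Task]]
    # return type of get_project_summary. `execution` and the IDs are assumptions.
    from typing import Dict, List
    from cirro_api_client.v1.models import Task

    project_id = "<project-id>"   # hypothetical
    dataset_id = "<dataset-id>"   # hypothetical
    task_id = "<task-id>"         # hypothetical

    # get_project_summary now returns a plain dict keyed by job queue
    summary: Dict[str, List[Task]] = execution.get_project_summary(project_id)

    # force_live=True fetches logs from CloudWatch even if the execution has completed
    logs = execution.get_execution_logs(project_id, dataset_id, force_live=True)

    # force_live=True lists jobs from the executor (e.g., AWS Batch) rather than the workflow report
    tasks = execution.get_tasks_for_execution(project_id, dataset_id, force_live=True)

    # per-task logs, also with the live option
    task_logs = execution.get_task_logs(project_id, dataset_id, task_id, force_live=True)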