@@ -192,3 +192,39 @@ def test_foundry_runner_with_submitted_files_path(movies_test_files, temp_ddb_co
192192 assert fh .get_resource_exists (report_uri )
193193 assert len (list (fh .iter_prefix (output_loc ))) == 2
194194 assert len (list (fh .iter_prefix (audit_files ))) == 3
195+
196+
def test_foundry_runner_error_at_bi_rules(movies_test_files, temp_ddb_conn):
    """A run whose business-interaction rules cannot resolve their reference
    data must end as a *handled* processing failure, not a crash.

    The ref-data loader is deliberately left without a dataset config
    (``dataset_config_uri = None``) so the business-rules stage fails; the
    pipeline should still stage the submitted file, write its audit
    artefacts, and record the submission as ``processing_failed``.
    """
    db_file, connection = temp_ddb_conn
    work_dir = Path(tempfile.mkdtemp()).as_posix()
    source_dir = Path(movies_test_files).as_posix()
    sid = uuid4().hex

    submission = SubmissionInfo(
        submission_id=sid,
        dataset_id="movies",
        file_name="good_movies",
        file_extension="json",
        submitting_org="TEST",
        datetime_received=datetime(2025, 11, 5),
    )

    # Missing refdata: share the test connection but give the loader no
    # dataset config, so the business rules cannot load their reference data.
    DuckDBRefDataLoader.connection = connection
    DuckDBRefDataLoader.dataset_config_uri = None

    with DDBAuditingManager(db_file.as_uri(), None, connection) as audit_manager:
        pipeline = FoundryDDBPipeline(
            processed_files_path=work_dir,
            audit_tables=audit_manager,
            connection=connection,
            rules_path=get_test_file_path("movies/movies_ddb.dischema.json").as_posix(),
            submitted_files_path=source_dir,
            reference_data_loader=DuckDBRefDataLoader,
        )
        output_loc, _report_uri, audit_files = pipeline.run_pipeline(submission)

        # The submitted file was still copied into the processing area...
        assert Path(work_dir, sid, submission.file_name_with_ext).exists()
        # ...but no validated output location was produced,
        assert output_loc is None
        # only the audit artefacts were written,
        assert len(list(fh.iter_prefix(audit_files))) == 2
        # and the submission is flagged as a handled processing failure.
        assert audit_manager.get_submission_status(sid).processing_failed
        latest = audit_manager.get_latest_processing_records()
        assert (
            latest.select("submission_result").pl().to_dicts()[0]["submission_result"]
            == "processing_failed"
        )
0 commit comments