Once a Workflow Schedule is consumed by the Worker, the Workflow is executed on it and the atomic tasks will
execute as defined."""

import json
import pathlib
import subprocess
2223
class WorkflowWorker:
    """Consumes Workflow messages from RabbitMQ and executes them.

    Holds a blocking pika connection to the broker and a workflow
    registry used to fetch workflow binaries.
    """

    def __init__(self):
        """Open the RabbitMQ connection and the workflow registry.

        Broker host and credentials are read from application settings;
        raises whatever pika raises if the broker is unreachable.
        """
        self._rabbit = pika.BlockingConnection(
            pika.ConnectionParameters(
                host=settings.rabbit_settings.rabbit_url,
                credentials=pika.PlainCredentials(
                    settings.rabbit_settings.rabbit_user,
                    settings.rabbit_settings.rabbit_password,
                ),
            )
        )

        # Registry that knows how to locate and download workflow
        # binaries (see s3_download_workflow_binary in execute_workflow).
        self._workflow_registry = workflow_registry.WorkflowRegistry()

3338 def execute_workflow (self , ch , method , properties , body ):
3439 try :
3540 workflow_obj = workflow_pb2 .Workflow ()
36- ParseDict (json .loads (body .decode ()), workflow_obj , ignore_unknown_fields = True )
41+ ParseDict (
42+ json .loads (body .decode ()), workflow_obj , ignore_unknown_fields = True
43+ )
3744
3845 # Download and execute workflow
3946 with tempfile .TemporaryDirectory () as tmp_dir :
4047 workflow_file_path = pathlib .Path (tmp_dir ) / "workflow.pex"
41- self ._workflow_registry .s3_download_workflow_binary (workflow = workflow_obj , workflow_file_path = workflow_file_path )
48+ self ._workflow_registry .s3_download_workflow_binary (
49+ workflow = workflow_obj , workflow_file_path = workflow_file_path
50+ )
4251 if not os .path .exists (workflow_file_path ):
43- logger .error (f"Workflow file was not downloaded correctly at: { workflow_file_path } " )
52+ logger .error (
53+ f"Workflow file was not downloaded correctly at: { workflow_file_path } "
54+ )
4455
45- logger .debug (f"Downloaded workflow binary from S3: { workflow_obj .workflow_id } " )
56+ logger .debug (
57+ f"Downloaded workflow binary from S3: { workflow_obj .workflow_id } "
58+ )
4659
4760 # Execute Workflow
4861 result = subprocess .run (
4962 ["python3.10" , str (workflow_file_path )],
5063 stdout = subprocess .PIPE ,
51- stderr = subprocess .PIPE
64+ stderr = subprocess .PIPE ,
5265 )
5366
5467 logger .debug (result .stdout )
5568 logger .debug (result )
5669
57- ch .basic_ack (delivery_tag = method .delivery_tag ) # Acknowledge successful processing
70+ ch .basic_ack (
71+ delivery_tag = method .delivery_tag
72+ ) # Acknowledge successful processing
5873 except Exception as e :
5974 print (f"Error processing message: { e } " )
6075 ch .basic_nack (delivery_tag = method .delivery_tag , requeue = False )
@@ -66,10 +81,13 @@ def loop(self):
6681 channel .queue_declare (queue = queue_name , durable = True )
6782
6883 channel .basic_qos (prefetch_count = 1 ) # Ensure fair dispatch
69- channel .basic_consume (queue = queue_name , on_message_callback = self .execute_workflow )
84+ channel .basic_consume (
85+ queue = queue_name , on_message_callback = self .execute_workflow
86+ )
7087
7188 print (" [*] Waiting for messages. To exit press CTRL+C" )
7289 channel .start_consuming ()
7390

# Script entry point: start a worker and consume messages forever.
if __name__ == "__main__":
    WorkflowWorker().loop()