def download3mfFromFTP(filename, destFile):
    """Download a .3mf file from the printer's FTPS server into *destFile*.

    Tries several candidate remote directories (/cache, /, /sdcard) in
    rotation across up to ``max_retries`` attempts, reconnecting on
    transient connection errors. On final failure with "file not found"
    (code 78), logs directory listings of the candidate paths to aid
    debugging.

    Args:
        filename: Remote file name (any leading path is stripped).
        destFile: An open file-like object with a ``.name`` attribute;
            the download is written to that local path.

    Returns:
        True on successful download, False otherwise.
    """
    ftp_host = PRINTER_IP
    ftp_user = "bblp"
    ftp_pass = PRINTER_CODE
    local_path = destFile.name  # Download into the current directory
    # Strip any client-side path; the printer only serves flat directories.
    base_name = os.path.basename(filename)
    remote_paths = [f"/cache/{base_name}", f"/{base_name}", f"/sdcard/{base_name}"]

    # Retry loop guards against a race where the file is requested before it
    # has fully landed in the printer's cache (sporadic "file not found").
    max_retries = 6
    last_err_code = None
    path_count = len(remote_paths)
    # pycurl error codes we react to:
    # 7: could not connect, 28: timeout, 35: SSL connect error,
    # 52: empty reply, 55: send error, 56: recv error.
    # 78: remote file not found, 13: bad PASV/EPSV response, 9: access denied.
    reconnect_codes = {7, 28, 35, 52, 55, 56}
    c = setupPycurlConnection(ftp_user, ftp_pass)
    try:
        for attempt in range(1, max_retries + 1):
            # Rotate through the candidate directories on successive attempts.
            path_index = (attempt - 1) % path_count
            remote_path = remote_paths[path_index]
            if not remote_path.startswith("/"):
                remote_path = "/" + remote_path
            encoded_remote_path = urllib.parse.quote(remote_path)
            url = f"ftps://{ftp_host}{encoded_remote_path}"

            log(f"[DEBUG] Attempting file download ({path_index + 1}/{path_count}): {remote_path}")  # Log attempted path

            with open(local_path, "wb") as f:
                try:
                    c.setopt(c.URL, url)
                    c.setopt(c.WRITEDATA, f)
                    log(f"[DEBUG] Attempt {attempt}: Starting download of {remote_path}...")
                    c.perform()
                    log("[DEBUG] File successfully downloaded!")
                    return True
                except pycurl.error as e:
                    last_err_code = e.args[0]
                    if last_err_code in reconnect_codes:
                        # Connection-level failure: rebuild the handle, then
                        # RETRY — previously this fell through to the fatal
                        # branch below, so the fresh connection was never used.
                        log(f"[WARNING] FTP connection error (code {last_err_code}). Reconnecting...")
                        try:
                            c.close()
                        except Exception:
                            pass
                        c = setupPycurlConnection(ftp_user, ftp_pass)
                        if attempt < max_retries:
                            time.sleep(5)
                            continue
                        log("[ERROR] Giving up after max retries for connection errors.")
                        break

                    if last_err_code in (78, 13):
                        # Transient: file not yet in cache (78) or a bad
                        # PASV/EPSV response (13) — back off and retry.
                        if attempt < max_retries:
                            log(f"[WARNING] Transient FTP error (code {last_err_code}). Retrying in 5s...")
                            time.sleep(5)
                            continue
                        log("[ERROR] Giving up after max retries for transient FTP errors.")
                        break
                    if last_err_code == 9:
                        # Access denied usually means external storage is not
                        # configured as the print-file location.
                        log("[DEBUG] Printer denied access to /cache path. Ensure external storage is setup to store print files in printer settings.")
                        return False
                    log(f"[ERROR] Fatal cURL error {last_err_code}: {e}")
                    return False
    finally:
        # Always release the (possibly reconnected) transfer handle.
        if c is not None:
            try:
                c.close()
            except Exception:
                pass

    if last_err_code == 78:
        # Diagnostics: list the candidate directories so the operator can see
        # what the printer actually has.
        log("[ERROR] File not found after max retries.")
        list_conn = setupPycurlConnection(ftp_user, ftp_pass)
        try:
            for list_path in ("/", "/sdcard/", "/cache/"):
                log(f"[DEBUG] Listing found printer files in {list_path} directory")
                buffer = io.BytesIO()
                list_conn.setopt(list_conn.URL, f"ftps://{ftp_host}{list_path}")
                list_conn.setopt(list_conn.WRITEDATA, buffer)
                list_conn.setopt(list_conn.DIRLISTONLY, True)
                try:
                    list_conn.perform()
                    log(f"[DEBUG] Directory Listing ({list_path}): {buffer.getvalue().decode('utf-8').splitlines()}")
                except Exception:
                    log(f"[ERROR] Could not retrieve directory listing for {list_path}.")
        finally:
            try:
                list_conn.close()
            except Exception:
                pass
    return False
133156def setupPycurlConnection (ftp_user , ftp_pass ):
134157 # Setup shared options for curl connections
@@ -176,6 +199,10 @@ def getMetaDataFrom3mf(url):
176199 download3mfFromCloud (url , temp_file )
177200 elif url .startswith ("local:" ):
178201 download3mfFromLocalFilesystem (url .replace ("local:" , "" ), temp_file )
202+ elif url .startswith (("file://" , "ftp://" , "ftps://" )):
203+ file_path = urlparse (url ).path
204+ filename = os .path .basename (file_path )
205+ download3mfFromFTP (filename , temp_file )
179206 else :
180207 download3mfFromFTP (url .rpartition ('/' )[- 1 ], temp_file ) # Pull just filename to clear out any unexpected paths
181208
0 commit comments