@@ -101,39 +101,73 @@ function openOrRevealUploadPanel(
101101 } ) ;
102102}
103103
104+ /**
105+ * Size threshold (in bytes) above which to use chunked upload.
106+ * Files over 100 MB require chunked upload to avoid 413 errors.
107+ * We use a lower threshold (20 MB) for better reliability.
108+ */
109+ const CHUNKED_UPLOAD_THRESHOLD = 20 * 1024 * 1024 ; // 20 MB
110+
111+ /**
112+ * Chunk size for chunked uploads. 6 MB is a good balance between
113+ * network resilience and upload speed.
114+ */
115+ const UPLOAD_CHUNK_SIZE = 6 * 1024 * 1024 ; // 6 MB
116+
104117/**
105118 * Uploads a file to Cloudinary with progress tracking.
119+ * Uses chunked upload for large files to avoid 413 errors.
106120 */
107121async function uploadWithProgress (
108122 panel : vscode . WebviewPanel ,
109123 dataUri : string ,
110124 options : Record < string , any > ,
111125 fileId : string
112126) : Promise < any > {
127+ // Convert data URI to buffer
128+ const base64Data = dataUri . split ( "," ) [ 1 ] ;
129+ const buffer = Buffer . from ( base64Data , "base64" ) ;
130+
131+ // Use chunked upload for large files to avoid 413 errors
132+ const useChunkedUpload = buffer . length > CHUNKED_UPLOAD_THRESHOLD ;
133+
113134 return new Promise ( ( resolve , reject ) => {
114- const uploadStream = cloudinary . uploader . upload_stream (
115- options ,
116- ( error , result ) => {
117- if ( error ) {
118- reject ( error ) ;
119- } else {
120- resolve ( result ) ;
135+ let uploadStream ;
136+
137+ if ( useChunkedUpload ) {
138+ // Use chunked upload for large files
139+ uploadStream = cloudinary . uploader . upload_chunked_stream (
140+ { ...options , chunk_size : UPLOAD_CHUNK_SIZE } ,
141+ ( error , result ) => {
142+ if ( error ) {
143+ reject ( error ) ;
144+ } else {
145+ resolve ( result ) ;
146+ }
121147 }
122- }
123- ) ;
124-
125- // Convert data URI to buffer
126- const base64Data = dataUri . split ( "," ) [ 1 ] ;
127- const buffer = Buffer . from ( base64Data , "base64" ) ;
148+ ) ;
149+ } else {
150+ // Use standard upload stream for smaller files
151+ uploadStream = cloudinary . uploader . upload_stream (
152+ options ,
153+ ( error , result ) => {
154+ if ( error ) {
155+ reject ( error ) ;
156+ } else {
157+ resolve ( result ) ;
158+ }
159+ }
160+ ) ;
161+ }
128162
129163 // Create readable stream with progress tracking
130164 let uploaded = 0 ;
131165 const total = buffer . length ;
132- const chunkSize = 64 * 1024 ; // 64KB chunks
166+ const progressChunkSize = 64 * 1024 ; // 64KB chunks for progress reporting
133167
134168 const readable = new Readable ( {
135169 read ( ) {
136- const chunk = buffer . slice ( uploaded , uploaded + chunkSize ) ;
170+ const chunk = buffer . slice ( uploaded , uploaded + progressChunkSize ) ;
137171 if ( chunk . length > 0 ) {
138172 uploaded += chunk . length ;
139173 const percent = Math . round ( ( uploaded / total ) * 100 ) ;
0 commit comments