
Forum Discussion

Andrewer016
Explorer | Level 4
5 years ago

[C#] Large files don't show up in user's cloud after finishing batch upload

Hey there!

I'm trying to upload large files with batch upload, as suggested in Dropbox's data ingress guide: https://www.dropbox.com/developers/reference/data-ingress-guide.

Everything seems to work, except that when I finish the batch upload with the UploadSessionFinishBatchAsync method, the large files don't show up in the cloud.

For clarity, here's the method I use:

        public async Task UploadFileBatch(List<string> localPaths)
        {
            //const int ChunkSize = 4096 * 1024;
            const int ChunkSize = 1024 * 1024;
            List<UploadSessionFinishArg> uploadSessionFinishArgs = new List<UploadSessionFinishArg>();
            List<FileStream> openedFileStreams = new List<FileStream>();
            using (var dbx = new DropboxClient("<REDACTED>"))
            {
                for (int i = 0; i < localPaths.Count; i++)
                {
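                    // remoteUploadPath is a field on the class ("/UploadTest/", judging by the run output below);
                    // build the remote path from the local one, skipping the ".." segments and the CRUD_tests folder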
                    string[] localPathBits = localPaths[i].Split('\\');
                    string remotePath = remoteUploadPath;
                    foreach (var bit in localPathBits)
                    {
                        if (!bit.Equals("..") && !bit.Equals("CRUD_tests"))
                        {
                            remotePath += bit + "/";
                        }
                    }
                    remotePath = remotePath.Remove(remotePath.Length - 1);

                    var fileInfo = new FileInfo(localPaths[i]);
                    FileStream fileStream = fileInfo.Open(FileMode.Open, FileAccess.Read, FileShare.Read);
                    openedFileStreams.Add(fileStream);
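                    // File fits in a single chunk: start the session with the whole file and close it in one call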
                    if (fileStream.Length <= ChunkSize)
                    {
                        var offset = (ulong)fileStream.Length;
                        var result = await dbx.Files.UploadSessionStartAsync(true, fileStream);
                        var sessionId = result.SessionId;
                        var cursor = new UploadSessionCursor(sessionId, offset);
                        UploadSessionFinishArg uploadSessionFinishArg = new UploadSessionFinishArg(cursor, new CommitInfo(remotePath, WriteMode.Overwrite.Instance));
                        uploadSessionFinishArgs.Add(uploadSessionFinishArg);
                    }
                    else
                    {
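                        // File is larger than ChunkSize: stream it up in ChunkSize pieces through one upload session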
                        string sessionId = null;
                        Console.WriteLine("IN BIG BATCH");
                        byte[] buffer = new byte[ChunkSize];
                        ulong numChunks = (ulong)Math.Ceiling((double)fileStream.Length / ChunkSize);
                        Console.WriteLine("numChunks: " + numChunks);
                        for (ulong idx = 0; idx < numChunks; idx++)
                        {
                            Console.WriteLine("UPLOADING CHUNK #{0}", idx + 1);
                            var byteRead = fileStream.Read(buffer, 0, ChunkSize);

                            using (var memStream = new MemoryStream(buffer, 0, byteRead))
                            {
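                                // The first chunk starts the session; every later chunk is appended at the running offset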
                                if (idx == 0)
                                {
                                    var result = await dbx.Files.UploadSessionStartAsync(false, memStream);
                                    sessionId = result.SessionId;
                                }
                                else
                                {
                                    Console.WriteLine(localPaths[i] + " : " + sessionId + " : " + (ulong)ChunkSize * idx);
                                    var cursor = new UploadSessionCursor(sessionId, (ulong)ChunkSize * idx);

                                    if (idx == numChunks - 1)
                                    {
                                        await dbx.Files.UploadSessionAppendV2Async(cursor, true, memStream);
                                        cursor = new UploadSessionCursor(sessionId, (ulong)ChunkSize * idx);
                                        UploadSessionFinishArg uploadSessionFinishArg = new UploadSessionFinishArg(cursor, new CommitInfo(remotePath, WriteMode.Overwrite.Instance));
                                        uploadSessionFinishArgs.Add(uploadSessionFinishArg);
                                        Console.WriteLine("FINISHING CHUNK UPLOAD");
                                    }
                                    else
                                    {
                                        await dbx.Files.UploadSessionAppendV2Async(cursor, false, memStream);
                                    }
                                }
                            }
                        }
                    }
                }
                foreach (var arg in uploadSessionFinishArgs)
                {
                    Console.WriteLine(arg.Commit.Path);
                    Console.WriteLine(arg.Cursor.SessionId);
                    Console.WriteLine(arg.Cursor.Offset);
                }
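                // Commit all the queued upload sessions in a single batch call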
                var batchResult = await dbx.Files.UploadSessionFinishBatchAsync(uploadSessionFinishArgs); // await instead of .Result, so the async method doesn't block
                Console.WriteLine("isAsyncJobId: {0} isComplete: {1}, isOther: {2}", batchResult.IsAsyncJobId, batchResult.IsComplete, batchResult.IsOther);
                Console.WriteLine(batchResult.AsAsyncJobId.Value);
                var status = await dbx.Files.UploadSessionFinishBatchCheckAsync(batchResult.AsAsyncJobId.Value);
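                // Poll the async job until it reports complete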
                while (status.IsComplete == false)
                {
                    Console.WriteLine("Complete: {0}, inProgress: {1}", status.IsComplete, status.IsInProgress);
                    status = await dbx.Files.UploadSessionFinishBatchCheckAsync(batchResult.AsAsyncJobId.Value);
                }
                Console.WriteLine("Complete: {0}, inProgress: {1}", status.IsComplete, status.IsInProgress);
                foreach (var fileStream in openedFileStreams)
                {
                    fileStream.Dispose();
                }
            }
        }

Basically, all I do is check whether each file is larger than a given size (currently 1 MB) and, if it is, upload it in chunks rather than as a single whole-file upload.
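
For reference, here's the offset arithmetic as I understand it from the docs (a cursor's offset is supposed to be the amount of data uploaded so far; the 7,000,000-byte file below is just a made-up example):

using System;

class OffsetArithmetic
{
    static void Main()
    {
        const int ChunkSize = 1024 * 1024;   // 1 MiB, same value as in my method
        const long fileLength = 7_000_000;   // hypothetical size of one large file

        // Chunk count, exactly as my method computes it: ceil(fileLength / ChunkSize)
        ulong numChunks = (ulong)Math.Ceiling((double)fileLength / ChunkSize);
        Console.WriteLine(numChunks);        // 7

        // Once every chunk has been appended, the total uploaded is the file length...
        Console.WriteLine((ulong)fileLength);                  // 7000000
        // ...while ChunkSize * (numChunks - 1) stops short of the last chunk:
        Console.WriteLine((ulong)ChunkSize * (numChunks - 1)); // 6291456
    }
}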

Obviously, I'm using batch upload to avoid lock contention.

The thing is that the small files (the ones smaller than the ChunkSize value) show up fine in the cloud, but the large files don't, even though every call comes back as successful.

Here's the output from one run:

Processed file '..\..\..\..\CRUD_tests\Generated\Files\randomFile0.txt'.
IN BIG BATCH
numChunks: 7
UPLOADING CHUNK #1
UPLOADING CHUNK #2
..\..\..\..\CRUD_tests\Generated\Files\randomFile0.txt : AAAAAAAAASchRyKW64_ouA : 1048576
UPLOADING CHUNK #3
..\..\..\..\CRUD_tests\Generated\Files\randomFile0.txt : AAAAAAAAASchRyKW64_ouA : 2097152
UPLOADING CHUNK #4
..\..\..\..\CRUD_tests\Generated\Files\randomFile0.txt : AAAAAAAAASchRyKW64_ouA : 3145728
UPLOADING CHUNK #5
..\..\..\..\CRUD_tests\Generated\Files\randomFile0.txt : AAAAAAAAASchRyKW64_ouA : 4194304
UPLOADING CHUNK #6
..\..\..\..\CRUD_tests\Generated\Files\randomFile0.txt : AAAAAAAAASchRyKW64_ouA : 5242880
UPLOADING CHUNK #7
..\..\..\..\CRUD_tests\Generated\Files\randomFile0.txt : AAAAAAAAASchRyKW64_ouA : 6291456
FINISHING CHUNK UPLOAD
/UploadTest/Generated/Files/randomFile0.txt
AAAAAAAAASchRyKW64_ouA
6291456
isAsyncJobId: True isComplete: False, isOther: False
dbjid:AABC0shKW6B4Q2jRSr-MN-OgBGvJ7Myfn6AzhwsF-thuOq4kGnOqdO0B-cLK9vUIKkmR5BBZjL4olQ16-hBUMtlD
Complete: False, inProgress: True
Complete: False, inProgress: True
Complete: True, inProgress: False
Uploads finished.

This run had just one large file, but if I had two smaller files alongside it, those would upload and show up in the cloud just fine. In fact, it looks like the large files are uploaded successfully too, yet they never appear.
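
One thing I haven't tried yet: if I read the SDK right, a completed batch job still reports a per-file result, so dumping the entries after the polling loop should show whether each commit actually succeeded. A rough sketch (I'm assuming the UploadSessionFinishBatchResult/UploadSessionFinishBatchResultEntry shapes here; dbx and batchResult are the variables from my method above):

// Assumed, unverified sketch: inspect each entry of the completed batch job.
var finalStatus = await dbx.Files.UploadSessionFinishBatchCheckAsync(batchResult.AsAsyncJobId.Value);
if (finalStatus.IsComplete)
{
    foreach (var entry in finalStatus.AsComplete.Value.Entries)
    {
        if (entry.IsSuccess)
            Console.WriteLine("OK: " + entry.AsSuccess.Value.PathDisplay); // FileMetadata of the committed file
        else if (entry.IsFailure)
            Console.WriteLine("FAILED: " + entry.AsFailure.Value);         // UploadSessionFinishError
    }
}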

So, does anyone have any idea what the problem is here? I've run out of ideas.

Thanks in advance for the help!

Cheers,

Andrew
