.NET & C# Development · Lesson 78 of 92

Secure File Uploads — Validate Type, Size & Malware Risk

IFormFile for Small Files

For files under ~30 MB, IFormFile is the simplest approach. ASP.NET Core buffers the entire upload before your action runs — in memory for small bodies, spilling to a temp file for larger ones.

C#
// Controllers/UploadsController.cs
// Controllers/UploadsController.cs
[ApiController]
[Route("api/uploads")]
public class UploadsController : ControllerBase
{
    /// <summary>
    /// Accepts a single buffered upload (capped at 30 MB), checks its extension
    /// against an allowlist, and saves it under ./uploads with a GUID-based name.
    /// </summary>
    [HttpPost("small")]
    [RequestSizeLimit(30 * 1024 * 1024)] // 30 MB cap
    public async Task<IActionResult> UploadSmall(IFormFile file)
    {
        if (file is null || file.Length == 0)
            return BadRequest("No file received.");

        var extension = Path.GetExtension(file.FileName).ToLowerInvariant();
        var permitted = new[] { ".jpg", ".jpeg", ".png", ".pdf" };
        if (!permitted.Contains(extension))
            return BadRequest($"Extension {extension} is not allowed.");

        // GUID-based name: never reuse the client-supplied file name on disk.
        Directory.CreateDirectory("uploads");
        var destination = Path.Combine("uploads", Guid.NewGuid() + extension);

        await using var output = System.IO.File.Create(destination);
        await file.CopyToAsync(output);

        return Ok(new { path = destination, size = file.Length });
    }
}

Streaming Large Files (No Buffering)

Buffering a 500 MB video into memory kills your server. Disable buffering and read the multipart body manually.

C#
// Program.cs — raise the multipart body length limit (default is ~128 MB) so
// large uploads are not rejected during form processing. This does not disable
// model binding; that is handled by the attribute shown below.
builder.Services.Configure<FormOptions>(o =>
{
    o.MultipartBodyLengthLimit = 2L * 1024 * 1024 * 1024; // 2 GB
});
C#
[HttpPost("large")]
[DisableRequestSizeLimit]
[DisableFormValueModelBinding] // custom attribute shown below
public async Task<IActionResult> UploadLarge()
{
    // Streams each multipart section straight to disk — the body is never buffered.
    if (!MultipartRequestHelper.IsMultipartContentType(Request.ContentType))
        return BadRequest("Not a multipart request.");

    var boundary = MultipartRequestHelper.GetBoundary(
        MediaTypeHeaderValue.Parse(Request.ContentType), 70); // 70 = RFC 2046 max boundary length
    var reader = new MultipartReader(boundary, Request.Body);

    // Same allowlist as the small-file endpoint — never trust a client extension blindly.
    var allowed = new[] { ".jpg", ".jpeg", ".png", ".pdf" };
    Directory.CreateDirectory("uploads"); // File.Create below fails if the directory is missing

    MultipartSection? section;
    while ((section = await reader.ReadNextSectionAsync()) != null)
    {
        if (!ContentDispositionHeaderValue.TryParse(
                section.ContentDisposition, out var cd) || !cd.IsFileDisposition())
            continue;

        // cd.FileName is client-controlled: keep only its extension, validate it,
        // and discard the rest of the name entirely.
        var ext = Path.GetExtension(cd.FileName.Value ?? "upload").ToLowerInvariant();
        if (!allowed.Contains(ext))
            return BadRequest($"Extension {ext} is not allowed.");

        var savePath = Path.Combine("uploads", Guid.NewGuid() + ext);

        await using var fileStream = System.IO.File.Create(savePath);
        await section.Body.CopyToAsync(fileStream); // streams — never fully in memory
    }

    return Ok();
}
C#
// DisableFormValueModelBindingAttribute.cs
// DisableFormValueModelBindingAttribute.cs
[AttributeUsage(AttributeTargets.Class | AttributeTargets.Method)]
public class DisableFormValueModelBindingAttribute : Attribute, IResourceFilter
{
    /// <summary>
    /// Removes the form-related value provider factories before model binding runs,
    /// leaving the multipart request body untouched for manual streaming in the action.
    /// </summary>
    public void OnResourceExecuting(ResourceExecutingContext context)
    {
        context.ValueProviderFactories.RemoveType<FormValueProviderFactory>();
        context.ValueProviderFactories.RemoveType<FormFileValueProviderFactory>();
        context.ValueProviderFactories.RemoveType<JQueryFormValueProviderFactory>();
    }

    /// <summary>No-op — nothing to restore after the action has executed.</summary>
    public void OnResourceExecuted(ResourceExecutedContext context) { }
}

File Type Validation — Extension + Magic Bytes

Never trust the extension alone. Read the first bytes of the stream.

C#
/// <summary>
/// Validates file content by comparing the leading bytes of a stream against
/// known magic-byte signatures, keyed by lowercase extension.
/// </summary>
public static class FileValidator
{
    private static readonly Dictionary<string, byte[]> MagicBytes = new()
    {
        { ".jpg",  new byte[] { 0xFF, 0xD8, 0xFF } },
        { ".jpeg", new byte[] { 0xFF, 0xD8, 0xFF } }, // same JPEG signature; ".jpeg" is in the upload allowlist
        { ".png",  new byte[] { 0x89, 0x50, 0x4E, 0x47 } },
        { ".pdf",  new byte[] { 0x25, 0x50, 0x44, 0x46 } },
        { ".zip",  new byte[] { 0x50, 0x4B, 0x03, 0x04 } },
    };

    /// <summary>
    /// Returns true when the first bytes of <paramref name="stream"/> match the known
    /// signature for <paramref name="extension"/>; false for unknown extensions or a
    /// mismatch. The stream is rewound to position 0 afterwards so the caller can
    /// reuse it — it must therefore be seekable.
    /// </summary>
    public static async Task<bool> IsValidAsync(Stream stream, string extension)
    {
        if (!MagicBytes.TryGetValue(extension.ToLowerInvariant(), out var magic))
            return false;

        var buffer = new byte[magic.Length];

        // Stream.ReadAsync may return fewer bytes than requested even before EOF,
        // so loop until the signature buffer is full or the stream ends.
        var total = 0;
        while (total < magic.Length)
        {
            var read = await stream.ReadAsync(buffer.AsMemory(total, magic.Length - total));
            if (read == 0) break; // EOF — file is shorter than the signature
            total += read;
        }

        stream.Position = 0; // rewind for later use

        return total == magic.Length && buffer.SequenceEqual(magic);
    }
}

// Usage in controller — IsValidAsync rewinds the stream to position 0, so the
// same stream can still be scanned and persisted afterwards.
var ext = Path.GetExtension(file.FileName).ToLowerInvariant();
await using var stream = file.OpenReadStream();
if (!await FileValidator.IsValidAsync(stream, ext))
    return BadRequest("File content does not match its extension.");

Virus Scanning Hook

Most production systems pipe uploads through ClamAV or a cloud scanner before saving permanently.

C#
/// <summary>Scans an upload for malware before it is persisted.</summary>
public interface IVirusScanner
{
    /// <summary>Returns true when the scanner reports the stream as clean.</summary>
    Task<bool> IsSafeAsync(Stream fileStream, CancellationToken ct = default);
}

// ClamAV implementation via nClam NuGet
// ClamAV implementation via nClam NuGet
public class ClamAvScanner : IVirusScanner
{
    private readonly ClamClient _clam;

    /// <summary>
    /// Builds a client from the ClamAV:Host and ClamAV:Port configuration keys.
    /// </summary>
    /// <exception cref="InvalidOperationException">A key is missing.</exception>
    /// <exception cref="FormatException">ClamAV:Port is not a valid integer.</exception>
    public ClamAvScanner(IConfiguration cfg)
    {
        // Fail fast with a clear message instead of null-forgiving (!) the lookups,
        // which would surface as an opaque NullReferenceException at scan time.
        var host = cfg["ClamAV:Host"]
            ?? throw new InvalidOperationException("Missing ClamAV:Host configuration.");
        var portValue = cfg["ClamAV:Port"]
            ?? throw new InvalidOperationException("Missing ClamAV:Port configuration.");
        var port = int.Parse(portValue, System.Globalization.CultureInfo.InvariantCulture);
        _clam = new ClamClient(host, port);
    }

    /// <summary>Streams the file to ClamAV; true only when the result is Clean.</summary>
    public async Task<bool> IsSafeAsync(Stream fileStream, CancellationToken ct = default)
    {
        var result = await _clam.SendAndScanFileAsync(fileStream, ct);
        return result.Result == ClamScanResults.Clean;
    }
}

// In controller — scan before persisting; rewind afterwards because the scanner
// consumed the stream.
if (!await _virusScanner.IsSafeAsync(stream))
    return UnprocessableEntity("File failed virus scan.");
stream.Position = 0;

Storing to Disk vs Azure Blob Storage

C#
// Services/IFileStore.cs
// Services/IFileStore.cs
/// <summary>
/// Abstraction over a file store (local disk, Azure Blob, …) so controllers do
/// not depend on a concrete storage backend.
/// </summary>
public interface IFileStore
{
    /// <summary>Persists the content and returns the storage key used to retrieve it.</summary>
    Task<string> SaveAsync(Stream content, string fileName, string contentType);
    /// <summary>Opens a readable stream for the stored file identified by the key.</summary>
    Task<Stream> GetAsync(string key);
    /// <summary>Removes the stored file identified by the key.</summary>
    Task DeleteAsync(string key);
}
C#
// Services/AzureBlobFileStore.cs
// Services/AzureBlobFileStore.cs
/// <summary>IFileStore backed by an Azure Blob Storage container named "uploads".</summary>
public class AzureBlobFileStore : IFileStore
{
    private readonly BlobContainerClient _container;

    /// <summary>
    /// Reads the "AzureStorage" connection string and ensures the private container
    /// exists. NOTE(review): CreateIfNotExists is a blocking network call in the
    /// constructor — acceptable for a singleton created once at startup.
    /// </summary>
    public AzureBlobFileStore(IConfiguration cfg)
    {
        var serviceClient = new BlobServiceClient(cfg.GetConnectionString("AzureStorage"));
        _container = serviceClient.GetBlobContainerClient("uploads");
        _container.CreateIfNotExists(PublicAccessType.None);
    }

    /// <summary>
    /// Uploads the stream under a "{guid}/{fileName}" key and returns that key.
    /// The client-supplied name is reduced to its leaf segment so it cannot inject
    /// extra path levels into the blob key.
    /// </summary>
    public async Task<string> SaveAsync(Stream content, string fileName, string contentType)
    {
        // Path.GetFileName strips directory components from client-controlled names.
        var key = $"{Guid.NewGuid()}/{Path.GetFileName(fileName)}";
        var blob = _container.GetBlobClient(key);
        await blob.UploadAsync(content, new BlobUploadOptions
        {
            HttpHeaders = new BlobHttpHeaders { ContentType = contentType }
        });
        return key;
    }

    /// <summary>Streams the blob back without buffering it in memory.</summary>
    public async Task<Stream> GetAsync(string key)
    {
        var blob = _container.GetBlobClient(key);
        var response = await blob.DownloadStreamingAsync();
        return response.Value.Content;
    }

    /// <summary>Deletes the blob; succeeds silently when the key does not exist.</summary>
    public async Task DeleteAsync(string key)
        => await _container.GetBlobClient(key).DeleteIfExistsAsync();
}

Register it: builder.Services.AddSingleton<IFileStore, AzureBlobFileStore>();

Returning File Downloads

C#
[HttpGet("{id}/download")]
public async Task<IActionResult> Download(Guid id)
{
    // Look up the attachment record; 404 when the id is unknown.
    if (await _db.Attachments.FindAsync(id) is not { } attachment)
        return NotFound();

    var content = await _fileStore.GetAsync(attachment.StorageKey);

    // Supplying fileDownloadName emits Content-Disposition: attachment, which
    // forces a download; omitting it lets the browser try to display inline.
    return File(content, attachment.ContentType,
                fileDownloadName: attachment.OriginalName);
}

Multipart: File + JSON Together

When you need metadata alongside the binary, send both as multipart parts.

C#
[HttpPost("with-metadata")]
public async Task<IActionResult> UploadWithMetadata(
    [FromForm] IFormFile file,
    [FromForm] string metadata) // JSON string in form field
{
    // Malformed JSON throws JsonException, which would surface as a 500;
    // translate both parse failure and a literal "null" payload into a 400.
    UploadMetadata? meta;
    try
    {
        meta = JsonSerializer.Deserialize<UploadMetadata>(metadata);
    }
    catch (JsonException)
    {
        return BadRequest("Invalid metadata JSON.");
    }
    if (meta is null)
        return BadRequest("Invalid metadata JSON.");

    // process file and meta together
    return Ok(new { meta.Title, meta.Tags, file.Length });
}

/// <summary>Metadata sent alongside the file in the "metadata" form field.</summary>
public record UploadMetadata(string Title, string[] Tags);

Chunked Upload with Progress Tracking

For resumable uploads, accept numbered chunks and assemble them server-side.

C#
[HttpPost("chunk")]
public async Task<IActionResult> UploadChunk(
    [FromForm] IFormFile chunk,
    [FromForm] string uploadId,
    [FromForm] int chunkIndex,
    [FromForm] int totalChunks)
{
    // uploadId is client-controlled and is used to build file-system paths:
    // reject anything that could escape the temp directory (path traversal).
    if (string.IsNullOrWhiteSpace(uploadId)
        || uploadId.Contains("..")
        || uploadId.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
        return BadRequest("Invalid uploadId.");

    // The D5 formatting below only sorts correctly for indices up to 99999.
    if (totalChunks <= 0 || totalChunks > 99_999
        || chunkIndex < 0 || chunkIndex >= totalChunks)
        return BadRequest("Invalid chunk index.");

    var tempDir = Path.Combine("uploads", "temp", uploadId);
    Directory.CreateDirectory(tempDir);

    // Zero-padded name so an ordinal sort of file names reproduces chunk order;
    // a retried chunk simply overwrites its previous file.
    var chunkPath = Path.Combine(tempDir, $"{chunkIndex:D5}");
    await using (var fs = System.IO.File.Create(chunkPath))
        await chunk.CopyToAsync(fs);

    var received = Directory.GetFiles(tempDir).Length;
    if (received < totalChunks)
        return Ok(new { received, totalChunks, complete = false });

    // All chunks arrived — concatenate them in index order into the final file.
    // Path.GetExtension cannot contain directory separators, so appending it is safe.
    var finalPath = Path.Combine("uploads", $"{uploadId}{Path.GetExtension(chunk.FileName)}");
    await using (var final = System.IO.File.Create(finalPath))
    {
        foreach (var part in Directory.GetFiles(tempDir).OrderBy(f => f))
        {
            await using var partStream = System.IO.File.OpenRead(part);
            await partStream.CopyToAsync(final);
        }
    }
    Directory.Delete(tempDir, recursive: true);

    return Ok(new { received, totalChunks, complete = true, path = finalPath });
}

The client tracks received / totalChunks for a progress bar. Use a background job for the assembly step on very large files.