Use io#LimitReader to avoid panic when reading files with active writes; closes pterodactyl/panel#3131

This commit is contained in:
Dane Everitt 2021-04-04 10:42:03 -07:00
parent f57c24002e
commit 16b0ca3a8e

View File

@@ -3,6 +3,7 @@ package router
 import (
 	"bufio"
 	"context"
+	"io"
 	"mime/multipart"
 	"net/http"
 	"net/url"
@@ -43,8 +44,16 @@ func getServerFileContents(c *gin.Context) {
 		c.Header("Content-Type", "application/octet-stream")
 	}
 	defer c.Writer.Flush()
-	_, err = bufio.NewReader(f).WriteTo(c.Writer)
-	if err != nil {
+	// If you don't do a limited reader here you will trigger a panic on write when
+	// a different server process writes content to the file after you've already
+	// determined the file size. This could lead to some weird content output but
+	// it would technically be accurate based on the content at the time of the request.
+	//
+	// "http: wrote more than the declared Content-Length"
+	//
+	// @see https://github.com/pterodactyl/panel/issues/3131
+	r := io.LimitReader(f, st.Size())
+	if _, err = bufio.NewReader(r).WriteTo(c.Writer); err != nil {
 		// Pretty sure this will unleash chaos on the response, but its a risk we can
 		// take since a panic will at least be recovered and this should be incredibly
 		// rare?