path: root/internal/media/processingmedia.go
author	tobi <31960611+tsmethurst@users.noreply.github.com>	2022-09-24 11:11:47 +0200
committer	GitHub <noreply@github.com>	2022-09-24 11:11:47 +0200
commit	78409f198566e2102c2aa97c022a5068a96f329b (patch)
tree	3b0b38c5919cfbd3b7458af37acb5a9fd922f8ed /internal/media/processingmedia.go
parent	[feature] Allow delivery to sharedInboxes where possible (#847)
[bugfix] Wrap media reader in length reader to determine length if no `content-length` given (#848)
* use lengthReader to determine fileSize if not given
* update tests
* small fixes
* go fmt
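The lengthReader named in the first bullet is added elsewhere in this commit and does not appear in the hunks below, but the idea is straightforward to sketch. What follows is a minimal illustration, not the commit's actual code; the type and field names are assumptions:

// A minimal sketch of the byte-counting reader idea from the commit
// message above, not the code added by this commit.
package media

import "io"

// lengthReader wraps an io.Reader and counts how many bytes are read
// through it, so the total can be read off once the stream is consumed.
type lengthReader struct {
	source io.Reader
	length int64
}

func (r *lengthReader) Read(b []byte) (int, error) {
	n, err := r.source.Read(b)
	r.length += int64(n)
	return n, err
}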
Diffstat (limited to 'internal/media/processingmedia.go')
-rw-r--r--	internal/media/processingmedia.go	42
1 file changed, 20 insertions, 22 deletions
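In the diff that follows, the direct call to p.storage.PutStream is replaced by a putStream helper that also returns a file size, which is then written into p.attachment.File.FileSize. The helper itself is defined outside processingmedia.go, so only its call site is visible here. A hypothetical sketch of how such a helper could behave, reusing the lengthReader sketched above; the streamPutter interface is an assumption standing in for the real storage driver:

// A hypothetical sketch in the same illustrative package as above,
// not the helper added by this commit.
package media

import (
	"context"
	"io"
)

// streamPutter stands in for whatever storage backend exposes PutStream.
type streamPutter interface {
	PutStream(ctx context.Context, key string, r io.Reader) error
}

// putStream stores the stream under key; if fileSize is unknown (<= 0),
// it measures the size while streaming and returns the measured value.
func putStream(ctx context.Context, s streamPutter, key string, r io.Reader, fileSize int64) (int64, error) {
	if fileSize > 0 {
		// size already known, e.g. from a content-length header
		return fileSize, s.PutStream(ctx, key, r)
	}

	// no content-length was given: count bytes as they pass into storage
	lr := &lengthReader{source: r}
	err := s.PutStream(ctx, key, lr)
	return lr.length, err
}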
diff --git a/internal/media/processingmedia.go b/internal/media/processingmedia.go
index e537c8301..5a8e6f590 100644
--- a/internal/media/processingmedia.go
+++ b/internal/media/processingmedia.go
@@ -268,7 +268,6 @@ func (p *ProcessingMedia) store(ctx context.Context) error {
 	if err != nil {
 		return fmt.Errorf("store: error executing data function: %s", err)
 	}
-	log.Tracef("store: reading %d bytes from data function for media %s", fileSize, p.attachment.URL)
 
 	// defer closing the reader when we're done with it
 	defer func() {
@@ -304,49 +303,48 @@
 	extension := split[1] // something like 'jpeg'
 
 	// concatenate the cleaned up first bytes with the existing bytes still in the reader (thanks Mara)
-	multiReader := io.MultiReader(bytes.NewBuffer(firstBytes), reader)
+	readerToStore := io.MultiReader(bytes.NewBuffer(firstBytes), reader)
 
-	// we'll need to clean exif data from the first bytes; while we're
-	// here, we can also use the extension to derive the attachment type
-	var clean io.Reader
+	// use the extension to derive the attachment type
+	// and, while we're in here, clean up exif data from
+	// the image if we already know the fileSize
 	switch extension {
 	case mimeGif:
 		p.attachment.Type = gtsmodel.FileTypeImage
-		clean = multiReader // nothing to clean from a gif
 	case mimeJpeg, mimePng:
 		p.attachment.Type = gtsmodel.FileTypeImage
-		purged, err := terminator.Terminate(multiReader, fileSize, extension)
-		if err != nil {
-			return fmt.Errorf("store: exif error: %s", err)
+		if fileSize > 0 {
+			var err error
+			readerToStore, err = terminator.Terminate(readerToStore, fileSize, extension)
+			if err != nil {
+				return fmt.Errorf("store: exif error: %s", err)
+			}
+			defer func() {
+				if rc, ok := readerToStore.(io.ReadCloser); ok {
+					if err := rc.Close(); err != nil {
+						log.Errorf("store: error closing terminator reader: %s", err)
+					}
+				}
+			}()
 		}
-		clean = purged
 	default:
 		return fmt.Errorf("store: couldn't process %s", extension)
 	}
 
-	// defer closing the clean reader when we're done with it
-	defer func() {
-		if rc, ok := clean.(io.ReadCloser); ok {
-			if err := rc.Close(); err != nil {
-				log.Errorf("store: error closing clean readcloser: %s", err)
-			}
-		}
-	}()
-
 	// now set some additional fields on the attachment since
 	// we know more about what the underlying media actually is
 	p.attachment.URL = uris.GenerateURIForAttachment(p.attachment.AccountID, string(TypeAttachment), string(SizeOriginal), p.attachment.ID, extension)
-	p.attachment.File.Path = fmt.Sprintf("%s/%s/%s/%s.%s", p.attachment.AccountID, TypeAttachment, SizeOriginal, p.attachment.ID, extension)
 	p.attachment.File.ContentType = contentType
-	p.attachment.File.FileSize = fileSize
+	p.attachment.File.Path = fmt.Sprintf("%s/%s/%s/%s.%s", p.attachment.AccountID, TypeAttachment, SizeOriginal, p.attachment.ID, extension)
 
 	// store this for now -- other processes can pull it out of storage as they please
-	if err := p.storage.PutStream(ctx, p.attachment.File.Path, clean); err != nil && err != storage.ErrAlreadyExists {
+	if fileSize, err = putStream(ctx, p.storage, p.attachment.File.Path, readerToStore, fileSize); err != nil && err != storage.ErrAlreadyExists {
 		return fmt.Errorf("store: error storing stream: %s", err)
 	}
 
 	cached := true
 	p.attachment.Cached = &cached
+	p.attachment.File.FileSize = fileSize
 
 	p.read = true
 
 	if p.postData != nil {