Add tags to S3 uploaded files

Jarrod Lowe 2025-04-28 16:56:56 +12:00
parent 852d9dcbe9
commit 141810039f
2 changed files with 22 additions and 10 deletions


@@ -95,7 +95,13 @@ def upload_file(
     id = str(uuid.uuid4())
     name = filename
     filename = f"{id}_{filename}"
-    contents, file_path = Storage.upload_file(file.file, filename)
+    tags = {
+        "OpenWebUI-User-Email": user.email,
+        "OpenWebUI-User-Id": user.id,
+        "OpenWebUI-User-Name": user.name,
+        "OpenWebUI-File-Id": id,
+    }
+    contents, file_path = Storage.upload_file(file.file, filename, tags)
     file_item = Files.insert_new_file(
         user.id,

@@ -3,7 +3,7 @@ import shutil
 import json
 import logging
 from abc import ABC, abstractmethod
-from typing import BinaryIO, Tuple
+from typing import BinaryIO, Tuple, Dict
 import boto3
 from botocore.config import Config
@@ -44,7 +44,7 @@ class StorageProvider(ABC):
         pass
     @abstractmethod
-    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(self, file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         pass
     @abstractmethod
@@ -58,7 +58,7 @@ class StorageProvider(ABC):
 class LocalStorageProvider(StorageProvider):
     @staticmethod
-    def upload_file(file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         contents = file.read()
         if not contents:
             raise ValueError(ERROR_MESSAGES.EMPTY_CONTENT)
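
LocalStorageProvider now accepts the tags argument purely for interface parity and ignores it, since a plain filesystem has no native tagging. If local tags were ever wanted, one hypothetical option (not in this commit) would be a sidecar JSON file written next to the upload:

import json

def write_tag_sidecar(file_path: str, tags: dict) -> str:
    """Persist tags next to a locally stored upload (hypothetical helper)."""
    sidecar_path = f"{file_path}.tags.json"
    with open(sidecar_path, "w", encoding="utf-8") as f:
        json.dump(tags, f)
    return sidecar_path
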
@@ -131,12 +131,18 @@ class S3StorageProvider(StorageProvider):
         self.bucket_name = S3_BUCKET_NAME
         self.key_prefix = S3_KEY_PREFIX if S3_KEY_PREFIX else ""
-    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(self, file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         """Handles uploading of the file to S3 storage."""
-        _, file_path = LocalStorageProvider.upload_file(file, filename)
+        _, file_path = LocalStorageProvider.upload_file(file, filename, tags)
+        tagging = {'TagSet': [{'Key': k, 'Value': v} for k, v in tags.items()]}
         try:
             s3_key = os.path.join(self.key_prefix, filename)
             self.s3_client.upload_file(file_path, self.bucket_name, s3_key)
+            self.s3_client.put_object_tagging(
+                Bucket=self.bucket_name,
+                Key=s3_key,
+                Tagging=tagging,
+            )
             return (
                 open(file_path, "rb").read(),
                 "s3://" + self.bucket_name + "/" + s3_key,
@@ -207,9 +213,9 @@ class GCSStorageProvider(StorageProvider):
         self.gcs_client = storage.Client()
         self.bucket = self.gcs_client.bucket(GCS_BUCKET_NAME)
-    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(self, file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         """Handles uploading of the file to GCS storage."""
-        contents, file_path = LocalStorageProvider.upload_file(file, filename)
+        contents, file_path = LocalStorageProvider.upload_file(file, filename, tags)
         try:
             blob = self.bucket.blob(filename)
             blob.upload_from_filename(file_path)
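
GCSStorageProvider receives the tags here but does not apply them, since GCS has no direct equivalent of S3 object tags. If that were wanted later, the same dict could be stored as custom object metadata; a minimal sketch assuming the google-cloud-storage client used above:

from google.cloud import storage

def upload_with_metadata(bucket_name: str, file_path: str, blob_name: str, tags: dict) -> None:
    """Upload to GCS with the tags attached as custom metadata (sketch)."""
    client = storage.Client()
    blob = client.bucket(bucket_name).blob(blob_name)
    blob.metadata = tags                  # custom metadata is sent with the upload
    blob.upload_from_filename(file_path)
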
@@ -277,9 +283,9 @@ class AzureStorageProvider(StorageProvider):
             self.container_name
         )
-    def upload_file(self, file: BinaryIO, filename: str) -> Tuple[bytes, str]:
+    def upload_file(self, file: BinaryIO, filename: str, tags: Dict[str, str]) -> Tuple[bytes, str]:
         """Handles uploading of the file to Azure Blob Storage."""
-        contents, file_path = LocalStorageProvider.upload_file(file, filename)
+        contents, file_path = LocalStorageProvider.upload_file(file, filename, tags)
         try:
             blob_client = self.container_client.get_blob_client(filename)
             blob_client.upload_blob(contents, overwrite=True)
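
Likewise, AzureStorageProvider now takes the tags but does not forward them. Azure Blob Storage does support blob index tags, so a later change could pass them through; a sketch assuming azure-storage-blob 12.4 or newer and, for brevity, a connection string rather than whatever credential setup the provider actually uses:

from azure.storage.blob import BlobServiceClient

def upload_blob_with_tags(conn_str: str, container: str, blob_name: str, data: bytes, tags: dict) -> None:
    """Upload to Azure Blob Storage with blob index tags applied (sketch)."""
    service = BlobServiceClient.from_connection_string(conn_str)
    blob_client = service.get_blob_client(container=container, blob=blob_name)
    blob_client.upload_blob(data, overwrite=True, tags=tags)
    # alternatively, apply tags after the upload: blob_client.set_blob_tags(tags)
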