Compare commits
1 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
168f5eac83 |
@@ -123,6 +123,7 @@ func main() {
|
||||
db.CategoryRepo,
|
||||
db.FeatureFlagRepo,
|
||||
permissionService,
|
||||
encryptor,
|
||||
)
|
||||
|
||||
if err := permissionService.EnsureAdminGroup(context.Background()); err != nil {
|
||||
@@ -147,6 +148,8 @@ func main() {
|
||||
adminHandler := handlers.NewAdminHandler(adminService)
|
||||
publicHandler := handlers.NewPublicHandler(spaceService, noteService)
|
||||
settingsHandler := handlers.NewSettingsHandler(authService)
|
||||
fileService := services.NewFileService(db.FeatureFlagRepo, db.MembershipRepo, encryptor)
|
||||
fileHandler := handlers.NewFileHandler(fileService)
|
||||
|
||||
// Create router
|
||||
router := mux.NewRouter()
|
||||
@@ -210,6 +213,14 @@ func main() {
|
||||
api.HandleFunc("/spaces/{spaceId}/categories/{categoryId}", categoryHandler.DeleteCategory).Methods("DELETE")
|
||||
api.HandleFunc("/spaces/{spaceId}/categories/{categoryId}/move", categoryHandler.MoveCategory).Methods("PATCH")
|
||||
|
||||
// File explorer endpoints (space-scoped)
|
||||
api.HandleFunc("/spaces/{spaceId}/files/list", fileHandler.ListFiles).Methods("GET")
|
||||
api.HandleFunc("/spaces/{spaceId}/files/object", fileHandler.GetFile).Methods("GET")
|
||||
api.HandleFunc("/spaces/{spaceId}/files/upload", fileHandler.UploadFile).Methods("POST")
|
||||
api.HandleFunc("/spaces/{spaceId}/files/folder", fileHandler.CreateFolder).Methods("POST")
|
||||
api.HandleFunc("/spaces/{spaceId}/files/object", fileHandler.DeleteFile).Methods("DELETE")
|
||||
api.HandleFunc("/spaces/{spaceId}/files/folder", fileHandler.DeleteFolder).Methods("DELETE")
|
||||
|
||||
// Admin endpoints
|
||||
admin := router.PathPrefix("/api/v1/admin").Subrouter()
|
||||
admin.Use(authMiddleware.Middleware)
|
||||
|
||||
@@ -12,6 +12,18 @@ require (
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/aws/aws-sdk-go-v2 v1.41.4 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.8 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.19.12 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.20 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.20 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.21 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.7 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.12 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.20 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.20 // indirect
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.97.2 // indirect
|
||||
github.com/aws/smithy-go v1.24.2 // indirect
|
||||
github.com/klauspost/compress v1.17.6 // indirect
|
||||
github.com/xdg-go/pbkdf2 v1.0.0 // indirect
|
||||
github.com/xdg-go/scram v1.2.0 // indirect
|
||||
|
||||
@@ -1,3 +1,27 @@
|
||||
github.com/aws/aws-sdk-go-v2 v1.41.4 h1:10f50G7WyU02T56ox1wWXq+zTX9I1zxG46HYuG1hH/k=
|
||||
github.com/aws/aws-sdk-go-v2 v1.41.4/go.mod h1:mwsPRE8ceUUpiTgF7QmQIJ7lgsKUPQOUl3o72QBrE1o=
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.8 h1:eBMB84YGghSocM7PsjmmPffTa+1FBUeNvGvFou6V/4o=
|
||||
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.7.8/go.mod h1:lyw7GFp3qENLh7kwzf7iMzAxDn+NzjXEAGjKS2UOKqI=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.19.12 h1:oqtA6v+y5fZg//tcTWahyN9PEn5eDU/Wpvc2+kJ4aY8=
|
||||
github.com/aws/aws-sdk-go-v2/credentials v1.19.12/go.mod h1:U3R1RtSHx6NB0DvEQFGyf/0sbrpJrluENHdPy1j/3TE=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.20 h1:CNXO7mvgThFGqOFgbNAP2nol2qAWBOGfqR/7tQlvLmc=
|
||||
github.com/aws/aws-sdk-go-v2/internal/configsources v1.4.20/go.mod h1:oydPDJKcfMhgfcgBUZaG+toBbwy8yPWubJXBVERtI4o=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.20 h1:tN6W/hg+pkM+tf9XDkWUbDEjGLb+raoBMFsTodcoYKw=
|
||||
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.7.20/go.mod h1:YJ898MhD067hSHA6xYCx5ts/jEd8BSOLtQDL3iZsvbc=
|
||||
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.21 h1:SwGMTMLIlvDNyhMteQ6r8IJSBPlRdXX5d4idhIGbkXA=
|
||||
github.com/aws/aws-sdk-go-v2/internal/v4a v1.4.21/go.mod h1:UUxgWxofmOdAMuqEsSppbDtGKLfR04HGsD0HXzvhI1k=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.7 h1:5EniKhLZe4xzL7a+fU3C2tfUN4nWIqlLesfrjkuPFTY=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.13.7/go.mod h1:x0nZssQ3qZSnIcePWLvcoFisRXJzcTVvYpAAdYX8+GI=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.12 h1:qtJZ70afD3ISKWnoX3xB0J2otEqu3LqicRcDBqsj0hQ=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.9.12/go.mod h1:v2pNpJbRNl4vEUWEh5ytQok0zACAKfdmKS51Hotc3pQ=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.20 h1:2HvVAIq+YqgGotK6EkMf+KIEqTISmTYh5zLpYyeTo1Y=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.13.20/go.mod h1:V4X406Y666khGa8ghKmphma/7C0DAtEQYhkq9z4vpbk=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.20 h1:siU1A6xjUZ2N8zjTHSXFhB9L/2OY8Dqs0xXiLjF30jA=
|
||||
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.19.20/go.mod h1:4TLZCmVJDM3FOu5P5TJP0zOlu9zWgDWU7aUxWbr+rcw=
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.97.2 h1:MRNiP6nqa20aEl8fQ6PJpEq11b2d40b16sm4WD7QgMU=
|
||||
github.com/aws/aws-sdk-go-v2/service/s3 v1.97.2/go.mod h1:FrNA56srbsr3WShiaelyWYEo70x80mXnVZ17ZZfbeqg=
|
||||
github.com/aws/smithy-go v1.24.2 h1:FzA3bu/nt/vDvmnkg+R8Xl46gmzEDam6mZ1hzmwXFng=
|
||||
github.com/aws/smithy-go v1.24.2/go.mod h1:YE2RhdIuDbA5E5bTdciG9KrW3+TiEONeUWCqxX9i1Fc=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/golang-jwt/jwt/v5 v5.2.0 h1:d/ix8ftRUorsN+5eMIlF4T6J8CAt9rch3My2winC1Jw=
|
||||
|
||||
@@ -62,6 +62,12 @@ type FeatureFlagsDTO struct {
|
||||
RegistrationEnabled bool `json:"registration_enabled"`
|
||||
ProviderLoginEnabled bool `json:"provider_login_enabled"`
|
||||
PublicSharingEnabled bool `json:"public_sharing_enabled"`
|
||||
FileExplorerEnabled bool `json:"file_explorer_enabled"`
|
||||
S3Endpoint string `json:"s3_endpoint,omitempty"`
|
||||
S3Bucket string `json:"s3_bucket,omitempty"`
|
||||
S3Region string `json:"s3_region,omitempty"`
|
||||
S3AccessKey string `json:"s3_access_key,omitempty"`
|
||||
S3SecretKeySet bool `json:"s3_secret_key_set"`
|
||||
}
|
||||
|
||||
// UpdateFeatureFlagsRequest represents admin payload for feature flag updates.
|
||||
@@ -69,6 +75,12 @@ type UpdateFeatureFlagsRequest struct {
|
||||
RegistrationEnabled bool `json:"registration_enabled"`
|
||||
ProviderLoginEnabled bool `json:"provider_login_enabled"`
|
||||
PublicSharingEnabled bool `json:"public_sharing_enabled"`
|
||||
FileExplorerEnabled bool `json:"file_explorer_enabled"`
|
||||
S3Endpoint string `json:"s3_endpoint"`
|
||||
S3Bucket string `json:"s3_bucket"`
|
||||
S3Region string `json:"s3_region"`
|
||||
S3AccessKey string `json:"s3_access_key"`
|
||||
S3SecretKey string `json:"s3_secret_key"` // empty = keep existing encrypted value
|
||||
}
|
||||
|
||||
// UserDTO represents a user in API responses
|
||||
@@ -206,6 +218,12 @@ func NewFeatureFlagsDTO(flags *entities.FeatureFlags) *FeatureFlagsDTO {
|
||||
RegistrationEnabled: flags.RegistrationEnabled,
|
||||
ProviderLoginEnabled: flags.ProviderLoginEnabled,
|
||||
PublicSharingEnabled: flags.PublicSharingEnabled,
|
||||
FileExplorerEnabled: flags.FileExplorerEnabled,
|
||||
S3Endpoint: flags.S3Endpoint,
|
||||
S3Bucket: flags.S3Bucket,
|
||||
S3Region: flags.S3Region,
|
||||
S3AccessKey: flags.S3AccessKey,
|
||||
S3SecretKeySet: flags.S3SecretKey != "",
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -10,6 +10,7 @@ import (
|
||||
"github.com/noteapp/backend/internal/application/dto"
|
||||
"github.com/noteapp/backend/internal/domain/entities"
|
||||
"github.com/noteapp/backend/internal/domain/repositories"
|
||||
"github.com/noteapp/backend/internal/infrastructure/security"
|
||||
)
|
||||
|
||||
// AdminService handles admin-level operations
|
||||
@@ -22,6 +23,7 @@ type AdminService struct {
|
||||
categoryRepo repositories.CategoryRepository
|
||||
featureFlagRepo repositories.FeatureFlagRepository
|
||||
permissionService *PermissionService
|
||||
encryptor *security.Encryptor
|
||||
}
|
||||
|
||||
// NewAdminService creates a new AdminService
|
||||
@@ -34,6 +36,7 @@ func NewAdminService(
|
||||
categoryRepo repositories.CategoryRepository,
|
||||
featureFlagRepo repositories.FeatureFlagRepository,
|
||||
permissionService *PermissionService,
|
||||
encryptor *security.Encryptor,
|
||||
) *AdminService {
|
||||
return &AdminService{
|
||||
userRepo: userRepo,
|
||||
@@ -44,6 +47,7 @@ func NewAdminService(
|
||||
categoryRepo: categoryRepo,
|
||||
featureFlagRepo: featureFlagRepo,
|
||||
permissionService: permissionService,
|
||||
encryptor: encryptor,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -299,10 +303,31 @@ func (s *AdminService) UpdateFeatureFlags(ctx context.Context, req *dto.UpdateFe
|
||||
return nil, errors.New("feature flags are unavailable")
|
||||
}
|
||||
|
||||
// Load existing flags so we can preserve the encrypted S3 secret when not updated
|
||||
existing, err := s.featureFlagRepo.GetFeatureFlags(ctx)
|
||||
if err != nil {
|
||||
existing = entities.NewDefaultFeatureFlags()
|
||||
}
|
||||
|
||||
flags := &entities.FeatureFlags{
|
||||
RegistrationEnabled: req.RegistrationEnabled,
|
||||
ProviderLoginEnabled: req.ProviderLoginEnabled,
|
||||
PublicSharingEnabled: req.PublicSharingEnabled,
|
||||
FileExplorerEnabled: req.FileExplorerEnabled,
|
||||
S3Endpoint: strings.TrimSpace(req.S3Endpoint),
|
||||
S3Bucket: strings.TrimSpace(req.S3Bucket),
|
||||
S3Region: strings.TrimSpace(req.S3Region),
|
||||
S3AccessKey: strings.TrimSpace(req.S3AccessKey),
|
||||
S3SecretKey: existing.S3SecretKey, // keep encrypted secret by default
|
||||
}
|
||||
|
||||
// Only re-encrypt if a new secret was supplied
|
||||
if s.encryptor != nil && strings.TrimSpace(req.S3SecretKey) != "" {
|
||||
encrypted, err := s.encryptor.Encrypt(strings.TrimSpace(req.S3SecretKey))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
flags.S3SecretKey = encrypted
|
||||
}
|
||||
|
||||
if err := s.featureFlagRepo.UpdateFeatureFlags(ctx, flags); err != nil {
|
||||
|
||||
389
backend/internal/application/services/file_service.go
Normal file
389
backend/internal/application/services/file_service.go
Normal file
@@ -0,0 +1,389 @@
|
||||
package services
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"io"
|
||||
"path"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/aws/aws-sdk-go-v2/aws"
|
||||
"github.com/aws/aws-sdk-go-v2/credentials"
|
||||
"github.com/aws/aws-sdk-go-v2/service/s3"
|
||||
"github.com/aws/aws-sdk-go-v2/service/s3/types"
|
||||
"go.mongodb.org/mongo-driver/v2/bson"
|
||||
|
||||
"github.com/noteapp/backend/internal/domain/repositories"
|
||||
"github.com/noteapp/backend/internal/infrastructure/security"
|
||||
)
|
||||
|
||||
// S3Object represents a file or folder entry with key relative to the space root.
type S3Object struct {
	Key          string `json:"key"`           // path relative to the space root, e.g. "docs/a.txt"
	Size         int64  `json:"size"`          // object size in bytes; 0 for folders
	LastModified string `json:"last_modified"` // RFC3339 timestamp; empty when unknown
	IsFolder     bool   `json:"is_folder"`     // true for virtual folders (S3 common prefixes)
}

// FileService handles S3 file operations scoped to individual spaces.
// S3 connection details come from the feature-flag store; a membership
// check gates every operation to members of the target space.
type FileService struct {
	featureFlagRepo repositories.FeatureFlagRepository // source of S3 config and the file-explorer toggle
	membershipRepo  repositories.MembershipRepository  // verifies the caller belongs to the space
	encryptor       *security.Encryptor                // decrypts the stored S3 secret key; may be nil
}

// NewFileService creates a new FileService.
func NewFileService(
	featureFlagRepo repositories.FeatureFlagRepository,
	membershipRepo repositories.MembershipRepository,
	encryptor *security.Encryptor,
) *FileService {
	return &FileService{
		featureFlagRepo: featureFlagRepo,
		membershipRepo:  membershipRepo,
		encryptor:       encryptor,
	}
}

// s3Config bundles a ready-to-use S3 client with the configured bucket name.
type s3Config struct {
	client *s3.Client
	bucket string
}
|
||||
|
||||
// buildS3Config loads feature flags, decrypts credentials, and returns an S3 client + bucket name.
|
||||
func (s *FileService) buildS3Config(ctx context.Context) (*s3Config, error) {
|
||||
flags, err := s.featureFlagRepo.GetFeatureFlags(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if !flags.FileExplorerEnabled {
|
||||
return nil, errors.New("file explorer is disabled")
|
||||
}
|
||||
if flags.S3Endpoint == "" || flags.S3Bucket == "" {
|
||||
return nil, errors.New("S3 is not configured")
|
||||
}
|
||||
|
||||
secretKey := ""
|
||||
if flags.S3SecretKey != "" && s.encryptor != nil {
|
||||
secretKey, err = s.encryptor.Decrypt(flags.S3SecretKey)
|
||||
if err != nil {
|
||||
return nil, errors.New("failed to decrypt S3 credentials")
|
||||
}
|
||||
}
|
||||
|
||||
region := flags.S3Region
|
||||
if region == "" {
|
||||
region = "us-east-1"
|
||||
}
|
||||
|
||||
cfg := aws.Config{
|
||||
Region: region,
|
||||
Credentials: credentials.NewStaticCredentialsProvider(flags.S3AccessKey, secretKey, ""),
|
||||
}
|
||||
|
||||
client := s3.NewFromConfig(cfg, func(o *s3.Options) {
|
||||
o.BaseEndpoint = aws.String(flags.S3Endpoint)
|
||||
o.UsePathStyle = true
|
||||
})
|
||||
|
||||
return &s3Config{client: client, bucket: flags.S3Bucket}, nil
|
||||
}
|
||||
|
||||
// validateAccess ensures file explorer is enabled and the user is a member of the space.
|
||||
// Returns a ready S3 config on success.
|
||||
func (s *FileService) validateAccess(ctx context.Context, userIDHex, spaceIDHex string) (*s3Config, error) {
|
||||
cfg, err := s.buildS3Config(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
userID, err := bson.ObjectIDFromHex(userIDHex)
|
||||
if err != nil {
|
||||
return nil, errors.New("access denied")
|
||||
}
|
||||
spaceID, err := bson.ObjectIDFromHex(spaceIDHex)
|
||||
if err != nil {
|
||||
return nil, errors.New("access denied")
|
||||
}
|
||||
|
||||
if _, err := s.membershipRepo.GetUserMembership(ctx, userID, spaceID); err != nil {
|
||||
return nil, errors.New("access denied")
|
||||
}
|
||||
|
||||
return cfg, nil
|
||||
}
|
||||
|
||||
// spaceBase returns the S3 key prefix for a space: "spaces/<spaceIDHex>/".
func spaceBase(spaceIDHex string) string {
	return "spaces/" + spaceIDHex + "/"
}

// resolveRelKey sanitises a relative key and returns the full S3 key,
// rejecting anything that would escape the space prefix.
//
// After path.Clean, ".." components can only survive at the front of a
// relative path, so only a leading ".." can climb out of the space folder.
// Filenames that merely contain ".." (e.g. "report..v2.txt") are legitimate
// and accepted.
func resolveRelKey(spaceIDHex, relKey string) (string, error) {
	relKey = strings.TrimLeft(strings.TrimSpace(relKey), "/")
	cleaned := path.Clean(relKey)
	if cleaned == "." || cleaned == "" {
		return "", errors.New("key is empty")
	}
	// path.Clean guarantees any remaining ".." is a leading component.
	if cleaned == ".." || strings.HasPrefix(cleaned, "../") {
		return "", errors.New("invalid key")
	}
	// base + cleaned always starts with base, so no further prefix check is needed.
	return spaceBase(spaceIDHex) + cleaned, nil
}

// resolveRelPrefix sanitises a relative folder prefix and returns the full S3 prefix.
// An empty relPrefix maps to the space root folder. Traversal is rejected on the
// same leading-".." rule as resolveRelKey.
func resolveRelPrefix(spaceIDHex, relPrefix string) (string, error) {
	base := spaceBase(spaceIDHex)
	relPrefix = strings.TrimLeft(strings.TrimSpace(relPrefix), "/")
	if relPrefix == "" {
		return base, nil
	}
	cleaned := path.Clean(relPrefix)
	if cleaned == "." {
		return base, nil
	}
	if cleaned == ".." || strings.HasPrefix(cleaned, "../") {
		return "", errors.New("invalid prefix")
	}
	return base + cleaned + "/", nil
}
|
||||
|
||||
// ListObjects returns objects and virtual folders directly under relPrefix within the space.
|
||||
// Returned keys are relative to the space root (no "spaces/<spaceId>/" prefix).
|
||||
func (s *FileService) ListObjects(ctx context.Context, userIDHex, spaceIDHex, relPrefix string) ([]*S3Object, error) {
|
||||
cfg, err := s.validateAccess(ctx, userIDHex, spaceIDHex)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
fullPrefix, err := resolveRelPrefix(spaceIDHex, relPrefix)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
base := spaceBase(spaceIDHex)
|
||||
result, err := cfg.client.ListObjectsV2(ctx, &s3.ListObjectsV2Input{
|
||||
Bucket: aws.String(cfg.bucket),
|
||||
Prefix: aws.String(fullPrefix),
|
||||
Delimiter: aws.String("/"),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
var objects []*S3Object
|
||||
|
||||
for _, cp := range result.CommonPrefixes {
|
||||
if cp.Prefix != nil {
|
||||
objects = append(objects, &S3Object{
|
||||
Key: strings.TrimPrefix(*cp.Prefix, base),
|
||||
IsFolder: true,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
for _, obj := range result.Contents {
|
||||
if obj.Key == nil || *obj.Key == fullPrefix {
|
||||
continue
|
||||
}
|
||||
// Hide virtual .keep placeholder files used for folder creation
|
||||
if path.Base(*obj.Key) == ".keep" {
|
||||
continue
|
||||
}
|
||||
size := int64(0)
|
||||
if obj.Size != nil {
|
||||
size = *obj.Size
|
||||
}
|
||||
lastMod := ""
|
||||
if obj.LastModified != nil {
|
||||
lastMod = obj.LastModified.Format(time.RFC3339)
|
||||
}
|
||||
objects = append(objects, &S3Object{
|
||||
Key: strings.TrimPrefix(*obj.Key, base),
|
||||
Size: size,
|
||||
LastModified: lastMod,
|
||||
})
|
||||
}
|
||||
|
||||
return objects, nil
|
||||
}
|
||||
|
||||
// GetObjectContent streams an S3 object, enforcing space boundary.
|
||||
// relKey is relative to the space root.
|
||||
func (s *FileService) GetObjectContent(ctx context.Context, userIDHex, spaceIDHex, relKey string) (io.ReadCloser, string, error) {
|
||||
cfg, err := s.validateAccess(ctx, userIDHex, spaceIDHex)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
fullKey, err := resolveRelKey(spaceIDHex, relKey)
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
result, err := cfg.client.GetObject(ctx, &s3.GetObjectInput{
|
||||
Bucket: aws.String(cfg.bucket),
|
||||
Key: aws.String(fullKey),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, "", err
|
||||
}
|
||||
|
||||
contentType := "application/octet-stream"
|
||||
if result.ContentType != nil {
|
||||
contentType = *result.ContentType
|
||||
}
|
||||
|
||||
return result.Body, contentType, nil
|
||||
}
|
||||
|
||||
// UploadObject stores a file at relKey within the space.
|
||||
func (s *FileService) UploadObject(ctx context.Context, userIDHex, spaceIDHex, relKey, contentType string, body io.Reader, size int64) error {
|
||||
cfg, err := s.validateAccess(ctx, userIDHex, spaceIDHex)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fullKey, err := resolveRelKey(spaceIDHex, relKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if contentType == "" {
|
||||
contentType = "application/octet-stream"
|
||||
}
|
||||
|
||||
input := &s3.PutObjectInput{
|
||||
Bucket: aws.String(cfg.bucket),
|
||||
Key: aws.String(fullKey),
|
||||
Body: body,
|
||||
ContentType: aws.String(contentType),
|
||||
}
|
||||
if size > 0 {
|
||||
input.ContentLength = aws.Int64(size)
|
||||
}
|
||||
|
||||
_, err = cfg.client.PutObject(ctx, input)
|
||||
return err
|
||||
}
|
||||
|
||||
// CreateFolder creates a virtual folder by uploading a zero-byte .keep placeholder.
|
||||
func (s *FileService) CreateFolder(ctx context.Context, userIDHex, spaceIDHex, relPath string) error {
|
||||
cfg, err := s.validateAccess(ctx, userIDHex, spaceIDHex)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
base := spaceBase(spaceIDHex)
|
||||
relPath = strings.Trim(relPath, "/")
|
||||
cleaned := path.Clean(relPath)
|
||||
if cleaned == "." || cleaned == "" || strings.Contains(cleaned, "..") {
|
||||
return errors.New("invalid folder path")
|
||||
}
|
||||
fullKey := base + cleaned + "/.keep"
|
||||
if !strings.HasPrefix(fullKey, base) {
|
||||
return errors.New("invalid folder path: outside space boundary")
|
||||
}
|
||||
|
||||
zero := int64(0)
|
||||
_, err = cfg.client.PutObject(ctx, &s3.PutObjectInput{
|
||||
Bucket: aws.String(cfg.bucket),
|
||||
Key: aws.String(fullKey),
|
||||
Body: bytes.NewReader(nil),
|
||||
ContentType: aws.String("application/octet-stream"),
|
||||
ContentLength: aws.Int64(zero),
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
// DeleteObject removes a single object within the space.
|
||||
func (s *FileService) DeleteObject(ctx context.Context, userIDHex, spaceIDHex, relKey string) error {
|
||||
cfg, err := s.validateAccess(ctx, userIDHex, spaceIDHex)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fullKey, err := resolveRelKey(spaceIDHex, relKey)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = cfg.client.DeleteObject(ctx, &s3.DeleteObjectInput{
|
||||
Bucket: aws.String(cfg.bucket),
|
||||
Key: aws.String(fullKey),
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
// DeleteFolder recursively deletes all objects under relPrefix within the space.
|
||||
func (s *FileService) DeleteFolder(ctx context.Context, userIDHex, spaceIDHex, relPrefix string) error {
|
||||
cfg, err := s.validateAccess(ctx, userIDHex, spaceIDHex)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fullPrefix, err := resolveRelPrefix(spaceIDHex, relPrefix)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Safety net: refuse to delete the entire space root
|
||||
if fullPrefix == spaceBase(spaceIDHex) {
|
||||
return errors.New("cannot delete the space root folder")
|
||||
}
|
||||
|
||||
paginator := s3.NewListObjectsV2Paginator(cfg.client, &s3.ListObjectsV2Input{
|
||||
Bucket: aws.String(cfg.bucket),
|
||||
Prefix: aws.String(fullPrefix),
|
||||
})
|
||||
|
||||
var toDelete []types.ObjectIdentifier
|
||||
for paginator.HasMorePages() {
|
||||
page, err := paginator.NextPage(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
for _, obj := range page.Contents {
|
||||
if obj.Key != nil {
|
||||
toDelete = append(toDelete, types.ObjectIdentifier{Key: obj.Key})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(toDelete) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Delete in batches of 1000 (S3 limit per DeleteObjects call)
|
||||
for i := 0; i < len(toDelete); i += 1000 {
|
||||
end := i + 1000
|
||||
if end > len(toDelete) {
|
||||
end = len(toDelete)
|
||||
}
|
||||
_, err := cfg.client.DeleteObjects(ctx, &s3.DeleteObjectsInput{
|
||||
Bucket: aws.String(cfg.bucket),
|
||||
Delete: &types.Delete{
|
||||
Objects: toDelete[i:end],
|
||||
Quiet: aws.Bool(true),
|
||||
},
|
||||
})
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@@ -39,6 +39,12 @@ type FeatureFlags struct {
|
||||
RegistrationEnabled bool `bson:"registration_enabled"`
|
||||
ProviderLoginEnabled bool `bson:"provider_login_enabled"`
|
||||
PublicSharingEnabled bool `bson:"public_sharing_enabled"`
|
||||
FileExplorerEnabled bool `bson:"file_explorer_enabled"`
|
||||
S3Endpoint string `bson:"s3_endpoint,omitempty"`
|
||||
S3Bucket string `bson:"s3_bucket,omitempty"`
|
||||
S3Region string `bson:"s3_region,omitempty"`
|
||||
S3AccessKey string `bson:"s3_access_key,omitempty"`
|
||||
S3SecretKey string `bson:"s3_secret_key,omitempty"` // AES-256-GCM encrypted
|
||||
}
|
||||
|
||||
// NewDefaultFeatureFlags returns safe defaults for a new deployment.
|
||||
@@ -47,5 +53,6 @@ func NewDefaultFeatureFlags() *FeatureFlags {
|
||||
RegistrationEnabled: true,
|
||||
ProviderLoginEnabled: true,
|
||||
PublicSharingEnabled: true,
|
||||
FileExplorerEnabled: false,
|
||||
}
|
||||
}
|
||||
|
||||
273
backend/internal/interfaces/handlers/file_handler.go
Normal file
273
backend/internal/interfaces/handlers/file_handler.go
Normal file
@@ -0,0 +1,273 @@
|
||||
package handlers
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"io"
|
||||
"mime"
|
||||
"net/http"
|
||||
"path"
|
||||
"strings"
|
||||
|
||||
"github.com/gorilla/mux"
|
||||
"github.com/noteapp/backend/internal/application/services"
|
||||
"github.com/noteapp/backend/internal/interfaces/middleware"
|
||||
)
|
||||
|
||||
// maxUploadSize is passed to ParseMultipartForm as the in-memory parsing budget.
// NOTE(review): ParseMultipartForm treats this as a memory threshold, not a hard
// request-size limit — confirm oversized uploads are capped (e.g. via
// http.MaxBytesReader) before parsing.
const maxUploadSize = 100 << 20 // 100 MB

// FileHandler exposes S3 file explorer endpoints scoped to spaces.
// All business logic (feature gating, membership checks, S3 access) lives
// in the wrapped FileService; handlers only parse/validate HTTP inputs.
type FileHandler struct {
	fileService *services.FileService
}

// NewFileHandler creates a new FileHandler.
func NewFileHandler(fileService *services.FileService) *FileHandler {
	return &FileHandler{fileService: fileService}
}

// extractContext extracts and validates spaceId (URL) and userId (JWT context).
// Fails when the route carries no spaceId or the auth middleware did not put
// a user ID into the request context.
func (h *FileHandler) extractContext(r *http.Request) (spaceID, userID string, err error) {
	spaceID = mux.Vars(r)["spaceId"]
	if spaceID == "" {
		return "", "", fmt.Errorf("missing spaceId")
	}
	userID, err = middleware.GetUserIDFromContext(r.Context())
	return
}
|
||||
|
||||
// cleanKey sanitises a user-supplied relative key (strips leading slash, resolves ".").
// Returns "" when the input normalises to nothing. Traversal components are left
// intact here; the service layer rejects them.
func cleanKey(raw string) string {
	trimmed := strings.TrimLeft(strings.TrimSpace(raw), "/")
	cleaned := path.Clean(trimmed)
	if cleaned == "." {
		return ""
	}
	return cleaned
}

// cleanPrefix sanitises a user-supplied relative prefix the same way as cleanKey.
func cleanPrefix(raw string) string {
	trimmed := strings.TrimLeft(strings.TrimSpace(raw), "/")
	cleaned := path.Clean(trimmed)
	if cleaned == "." {
		return ""
	}
	return cleaned
}
|
||||
|
||||
// respondError maps service errors to appropriate HTTP status codes.
|
||||
func respondError(w http.ResponseWriter, err error) {
|
||||
msg := err.Error()
|
||||
switch {
|
||||
case strings.Contains(msg, "access denied"), strings.Contains(msg, "disabled"):
|
||||
http.Error(w, msg, http.StatusForbidden)
|
||||
default:
|
||||
http.Error(w, msg, http.StatusBadRequest)
|
||||
}
|
||||
}
|
||||
|
||||
// ListFiles handles GET /api/v1/spaces/{spaceId}/files/list?prefix=
|
||||
func (h *FileHandler) ListFiles(w http.ResponseWriter, r *http.Request) {
|
||||
spaceID, userID, err := h.extractContext(r)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
relPrefix := cleanPrefix(r.URL.Query().Get("prefix"))
|
||||
objects, err := h.fileService.ListObjects(r.Context(), userID, spaceID, relPrefix)
|
||||
if err != nil {
|
||||
respondError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
json.NewEncoder(w).Encode(map[string]interface{}{
|
||||
"objects": objects,
|
||||
"prefix": relPrefix,
|
||||
})
|
||||
}
|
||||
|
||||
// GetFile handles GET /api/v1/spaces/{spaceId}/files/object?key=
// Also accepts ?token= as a fallback auth mechanism so markdown images render in-browser.
// Streams the object body with its stored content type. Service failures other
// than authorisation collapse into a generic 404 so bucket details don't leak.
func (h *FileHandler) GetFile(w http.ResponseWriter, r *http.Request) {
	spaceID, userID, err := h.extractContext(r)
	if err != nil {
		http.Error(w, err.Error(), http.StatusBadRequest)
		return
	}

	relKey := cleanKey(r.URL.Query().Get("key"))
	if relKey == "" {
		http.Error(w, "key is required", http.StatusBadRequest)
		return
	}

	body, contentType, err := h.fileService.GetObjectContent(r.Context(), userID, spaceID, relKey)
	if err != nil {
		// Authorisation failures get a precise 403; everything else
		// (bad key, S3 errors) is reported uniformly as 404.
		if strings.Contains(err.Error(), "access denied") {
			http.Error(w, "access denied", http.StatusForbidden)
			return
		}
		http.Error(w, "file not found", http.StatusNotFound)
		return
	}
	defer body.Close()

	w.Header().Set("Content-Type", contentType)
	// Private cache only: the content is per-user authorised.
	w.Header().Set("Cache-Control", "private, max-age=3600")
	io.Copy(w, body) //nolint:errcheck
}
|
||||
|
||||
// UploadFile handles POST /api/v1/spaces/{spaceId}/files/upload (multipart/form-data)
|
||||
// Form fields:
|
||||
// - path: optional relative folder within the space (e.g. "docs/2024")
|
||||
// - files: one or more file uploads
|
||||
func (h *FileHandler) UploadFile(w http.ResponseWriter, r *http.Request) {
|
||||
spaceID, userID, err := h.extractContext(r)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if err := r.ParseMultipartForm(maxUploadSize); err != nil {
|
||||
http.Error(w, "request too large", http.StatusRequestEntityTooLarge)
|
||||
return
|
||||
}
|
||||
|
||||
relFolder := cleanPrefix(r.FormValue("path"))
|
||||
fileHeaders := r.MultipartForm.File["files"]
|
||||
if len(fileHeaders) == 0 {
|
||||
http.Error(w, "no files provided", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var uploaded []string
|
||||
for _, fh := range fileHeaders {
|
||||
filename := path.Base(fh.Filename)
|
||||
if filename == "." || filename == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
var relKey string
|
||||
if relFolder != "" {
|
||||
relKey = relFolder + "/" + filename
|
||||
} else {
|
||||
relKey = filename
|
||||
}
|
||||
|
||||
// Detect content-type from header then extension
|
||||
ct := fh.Header.Get("Content-Type")
|
||||
if ct == "" || ct == "application/octet-stream" {
|
||||
if ext := path.Ext(filename); ext != "" {
|
||||
if t := mime.TypeByExtension(ext); t != "" {
|
||||
ct = t
|
||||
}
|
||||
}
|
||||
}
|
||||
if ct == "" {
|
||||
ct = "application/octet-stream"
|
||||
}
|
||||
|
||||
f, err := fh.Open()
|
||||
if err != nil {
|
||||
http.Error(w, "failed to read uploaded file", http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
uploadErr := h.fileService.UploadObject(r.Context(), userID, spaceID, relKey, ct, f, fh.Size)
|
||||
f.Close()
|
||||
if uploadErr != nil {
|
||||
respondError(w, uploadErr)
|
||||
return
|
||||
}
|
||||
uploaded = append(uploaded, relKey)
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
json.NewEncoder(w).Encode(map[string]interface{}{"uploaded": uploaded})
|
||||
}
|
||||
|
||||
// CreateFolder handles POST /api/v1/spaces/{spaceId}/files/folder
|
||||
// JSON body: {"path": "new-folder-name"}
|
||||
func (h *FileHandler) CreateFolder(w http.ResponseWriter, r *http.Request) {
|
||||
spaceID, userID, err := h.extractContext(r)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
var body struct {
|
||||
Path string `json:"path"`
|
||||
}
|
||||
if err := json.NewDecoder(r.Body).Decode(&body); err != nil {
|
||||
http.Error(w, "invalid request body", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
relPath := cleanPrefix(body.Path)
|
||||
if relPath == "" {
|
||||
http.Error(w, "path is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.fileService.CreateFolder(r.Context(), userID, spaceID, relPath); err != nil {
|
||||
respondError(w, err)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.WriteHeader(http.StatusCreated)
|
||||
json.NewEncoder(w).Encode(map[string]string{"path": relPath})
|
||||
}
|
||||
|
||||
// DeleteFile handles DELETE /api/v1/spaces/{spaceId}/files/object?key=
|
||||
func (h *FileHandler) DeleteFile(w http.ResponseWriter, r *http.Request) {
|
||||
spaceID, userID, err := h.extractContext(r)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
relKey := cleanKey(r.URL.Query().Get("key"))
|
||||
if relKey == "" {
|
||||
http.Error(w, "key is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.fileService.DeleteObject(r.Context(), userID, spaceID, relKey); err != nil {
|
||||
if strings.Contains(err.Error(), "access denied") {
|
||||
http.Error(w, "access denied", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
|
||||
// DeleteFolder handles DELETE /api/v1/spaces/{spaceId}/files/folder?prefix=
|
||||
func (h *FileHandler) DeleteFolder(w http.ResponseWriter, r *http.Request) {
|
||||
spaceID, userID, err := h.extractContext(r)
|
||||
if err != nil {
|
||||
http.Error(w, err.Error(), http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
relPrefix := cleanPrefix(r.URL.Query().Get("prefix"))
|
||||
if relPrefix == "" {
|
||||
http.Error(w, "prefix is required", http.StatusBadRequest)
|
||||
return
|
||||
}
|
||||
|
||||
if err := h.fileService.DeleteFolder(r.Context(), userID, spaceID, relPrefix); err != nil {
|
||||
if strings.Contains(err.Error(), "access denied") {
|
||||
http.Error(w, "access denied", http.StatusForbidden)
|
||||
return
|
||||
}
|
||||
http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
return
|
||||
}
|
||||
|
||||
w.WriteHeader(http.StatusNoContent)
|
||||
}
|
||||
@@ -41,8 +41,14 @@ func (m *AuthMiddleware) Middleware(next http.Handler) http.Handler {
|
||||
return
|
||||
}
|
||||
|
||||
// Extract token from Authorization header
|
||||
// Extract token from Authorization header.
|
||||
// For GET /files/object, also accept ?token= so markdown images render in-browser.
|
||||
authHeader := r.Header.Get("Authorization")
|
||||
if authHeader == "" && r.Method == http.MethodGet && strings.HasSuffix(r.URL.Path, "/files/object") {
|
||||
if tok := r.URL.Query().Get("token"); tok != "" {
|
||||
authHeader = "Bearer " + tok
|
||||
}
|
||||
}
|
||||
if authHeader == "" {
|
||||
http.Error(w, "Missing authorization header", http.StatusUnauthorized)
|
||||
return
|
||||
|
||||
@@ -174,11 +174,12 @@
|
||||
:note="selectedNote"
|
||||
:category-options="categoryOptions"
|
||||
:can-delete="canDeleteNotes"
|
||||
:space-id="currentSpace?.id"
|
||||
@save="updateNote"
|
||||
@delete="deleteNote"
|
||||
@cancel="cancelEditingNote"
|
||||
/>
|
||||
<NoteViewer v-else-if="selectedNote" :note="selectedNote" :category-options="categoryOptions" />
|
||||
<NoteViewer v-else-if="selectedNote" :note="selectedNote" :category-options="categoryOptions" :space-id="currentSpace?.id" />
|
||||
<NoteList
|
||||
v-else
|
||||
:notes="displayedNotes"
|
||||
|
||||
331
frontend/src/components/FileExplorer.vue
Normal file
331
frontend/src/components/FileExplorer.vue
Normal file
@@ -0,0 +1,331 @@
|
||||
<template>
|
||||
<div
|
||||
class="file-explorer d-flex flex-column border rounded"
|
||||
style="min-height: 300px"
|
||||
@dragover.prevent="dragOver = true"
|
||||
@dragleave="dragOver = false"
|
||||
@drop.prevent="handleDrop"
|
||||
:class="{ 'drag-active': dragOver }"
|
||||
>
|
||||
<!-- Breadcrumb toolbar -->
|
||||
<div class="file-explorer-header px-2 py-1 border-bottom bg-light d-flex align-items-center gap-1 flex-wrap">
|
||||
<i class="mdi mdi-folder-network-outline text-muted me-1" aria-hidden="true"></i>
|
||||
<button class="btn btn-link btn-sm p-0 text-decoration-none text-dark" @click="navigateTo('')">Space Files</button>
|
||||
<template v-for="(seg, idx) in breadcrumbs" :key="idx">
|
||||
<span class="text-muted">/</span>
|
||||
<button class="btn btn-link btn-sm p-0 text-decoration-none text-dark" @click="navigateTo(seg.prefix)">{{ seg.name }}</button>
|
||||
</template>
|
||||
<div class="ms-auto d-flex gap-1">
|
||||
<button class="btn btn-sm btn-outline-secondary py-0 px-1" title="Upload files" @click="fileInputRef.click()">
|
||||
<i class="mdi mdi-upload" aria-hidden="true"></i>
|
||||
</button>
|
||||
<button class="btn btn-sm btn-outline-secondary py-0 px-1" title="New folder" @click="showNewFolderInput = !showNewFolderInput">
|
||||
<i class="mdi mdi-folder-plus-outline" aria-hidden="true"></i>
|
||||
</button>
|
||||
<button class="btn btn-sm btn-link p-0 text-muted" title="Refresh" @click="loadFiles">
|
||||
<i class="mdi mdi-refresh" aria-hidden="true"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- New folder input -->
|
||||
<div v-if="showNewFolderInput" class="px-2 py-1 border-bottom bg-white d-flex gap-1">
|
||||
<input
|
||||
ref="newFolderInputRef"
|
||||
v-model="newFolderName"
|
||||
type="text"
|
||||
class="form-control form-control-sm"
|
||||
placeholder="Folder name"
|
||||
@keyup.enter="createFolder"
|
||||
@keyup.esc="showNewFolderInput = false"
|
||||
/>
|
||||
<button class="btn btn-sm btn-primary" @click="createFolder">Create</button>
|
||||
<button class="btn btn-sm btn-secondary" @click="showNewFolderInput = false">Cancel</button>
|
||||
</div>
|
||||
|
||||
<!-- Upload progress -->
|
||||
<div v-if="uploading" class="px-2 py-1 bg-light border-bottom">
|
||||
<div class="d-flex align-items-center gap-2">
|
||||
<div class="progress flex-grow-1" style="height: 6px">
|
||||
<div class="progress-bar progress-bar-striped progress-bar-animated" :style="{ width: uploadProgress + '%' }"></div>
|
||||
</div>
|
||||
<span class="text-muted" style="font-size: 0.7rem">{{ uploadProgress }}%</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Error message -->
|
||||
<div v-if="error" class="alert alert-danger alert-sm m-1 p-1 small mb-0" role="alert">
|
||||
<i class="mdi mdi-alert-circle-outline me-1" aria-hidden="true"></i>{{ error }}
|
||||
<button type="button" class="btn-close float-end" style="font-size: 0.6rem" @click="error = ''"></button>
|
||||
</div>
|
||||
|
||||
<!-- Loading / empty -->
|
||||
<div v-if="loading" class="p-3 text-muted text-center small flex-grow-1"><i class="mdi mdi-loading mdi-spin me-1" aria-hidden="true"></i> Loading...</div>
|
||||
<div v-else-if="!error && objects.length === 0" class="p-3 text-muted text-center small flex-grow-1">
|
||||
<i class="mdi mdi-cloud-upload-outline d-block mb-1" style="font-size: 1.5rem" aria-hidden="true"></i>
|
||||
Drop files here or click Upload
|
||||
</div>
|
||||
|
||||
<!-- File list -->
|
||||
<div v-else class="file-list flex-grow-1 overflow-auto">
|
||||
<div
|
||||
v-for="obj in objects"
|
||||
:key="obj.key"
|
||||
class="file-item d-flex align-items-center gap-1 px-2 py-1"
|
||||
:title="obj.is_folder ? 'Open folder' : 'Insert into note'"
|
||||
@click="handleClick(obj)"
|
||||
>
|
||||
<i :class="fileIcon(obj)" style="font-size: 1rem; width: 1.1rem; flex-shrink: 0" aria-hidden="true"></i>
|
||||
<span class="flex-grow-1 text-truncate" style="font-size: 0.82rem">{{ displayName(obj) }}</span>
|
||||
<span v-if="!obj.is_folder && obj.size > 0" class="text-muted flex-shrink-0" style="font-size: 0.68rem">{{ formatSize(obj.size) }}</span>
|
||||
<button class="btn-delete btn btn-sm btn-link p-0 text-danger ms-1" :title="obj.is_folder ? 'Delete folder' : 'Delete file'" @click.stop="deleteItem(obj)">
|
||||
<i class="mdi mdi-trash-can-outline" style="font-size: 0.85rem" aria-hidden="true"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Hidden file input -->
|
||||
<input ref="fileInputRef" type="file" multiple class="d-none" @change="handleFilePick" />
|
||||
</div>
|
||||
</template>
|
||||
|
||||
<script setup>
|
||||
import { ref, computed, watch, nextTick } from "vue";
|
||||
import apiClient from "../services/apiClient";
|
||||
|
||||
const props = defineProps({
|
||||
spaceId: {
|
||||
type: String,
|
||||
required: true,
|
||||
},
|
||||
modelValue: {
|
||||
type: String,
|
||||
default: "",
|
||||
},
|
||||
});
|
||||
|
||||
const emit = defineEmits(["insert", "update:modelValue"]);
|
||||
|
||||
const objects = ref([]);
|
||||
const loading = ref(false);
|
||||
const error = ref("");
|
||||
const currentPrefix = ref(props.modelValue || "");
|
||||
const dragOver = ref(false);
|
||||
const uploading = ref(false);
|
||||
const uploadProgress = ref(0);
|
||||
const showNewFolderInput = ref(false);
|
||||
const newFolderName = ref("");
|
||||
const fileInputRef = ref(null);
|
||||
const newFolderInputRef = ref(null);
|
||||
|
||||
// Breadcrumb segments for the current prefix: each entry carries the segment
// name plus the cumulative prefix used when the crumb is clicked.
const breadcrumbs = computed(() => {
  const prefix = currentPrefix.value;
  if (!prefix) return [];
  const segments = prefix.replace(/\/$/, "").split("/").filter(Boolean);
  const crumbs = [];
  segments.forEach((name, i) => {
    crumbs.push({ name, prefix: segments.slice(0, i + 1).join("/") });
  });
  return crumbs;
});
|
||||
|
||||
// Fetch the object listing for the current prefix from the space files API.
const loadFiles = async () => {
  if (!props.spaceId) return;
  loading.value = true;
  error.value = "";
  try {
    const response = await apiClient.get(`/api/v1/spaces/${props.spaceId}/files/list`, {
      params: { prefix: currentPrefix.value },
    });
    objects.value = response.data.objects || [];
  } catch (e) {
    // The API returns plain-text error bodies; fall back to a generic message.
    error.value = e.response?.data || "Failed to load files";
  } finally {
    loading.value = false;
  }
};
|
||||
|
||||
// Switch the explorer to a new prefix, sync it to the parent via v-model,
// and refresh the listing.
const navigateTo = (prefix) => {
  currentPrefix.value = prefix;
  emit("update:modelValue", prefix);
  loadFiles();
};
|
||||
|
||||
/**
 * Handle a click on a list entry: folders navigate into their prefix,
 * files emit a markdown snippet for insertion into the note.
 *
 * Fix: the image branch was an empty template literal, so clicking an
 * image file inserted nothing. Image files now insert markdown image
 * syntax (`![name](url)`), matching the link form used for other files.
 */
const handleClick = (obj) => {
  if (obj.is_folder) {
    // Folder keys carry a trailing slash; strip it before navigating.
    navigateTo(obj.key.replace(/\/$/, ""));
    return;
  }
  const url = `/api/v1/spaces/${props.spaceId}/files/object?key=${encodeURIComponent(obj.key)}`;
  const name = displayName(obj);
  const ext = name.split(".").pop().toLowerCase();
  const imageExts = ["jpg", "jpeg", "png", "gif", "webp", "svg", "bmp", "avif"];
  // Images insert as inline markdown images; everything else as a link.
  const snippet = imageExts.includes(ext) ? `![${name}](${url})` : `[${name}](${url})`;
  emit("insert", snippet);
};
|
||||
|
||||
// Forward files chosen via the hidden <input type="file"> to the uploader.
const handleFilePick = (event) => {
  const picked = Array.from(event.target.files || []);
  if (picked.length > 0) uploadFiles(picked);
  // Clear the input so picking the same file again still fires `change`.
  event.target.value = "";
};
|
||||
|
||||
// Upload files dropped onto the explorer area and reset the drag highlight.
const handleDrop = (event) => {
  dragOver.value = false;
  const dropped = Array.from(event.dataTransfer?.files || []);
  if (dropped.length > 0) uploadFiles(dropped);
};
|
||||
|
||||
// Upload a batch of files to the current prefix as a single multipart
// request, reporting percentage progress, then refresh the listing.
const uploadFiles = async (files) => {
  if (!props.spaceId || files.length === 0) return;
  uploading.value = true;
  uploadProgress.value = 0;
  error.value = "";

  const form = new FormData();
  form.append("path", currentPrefix.value);
  files.forEach((file) => form.append("files", file));

  try {
    await apiClient.post(`/api/v1/spaces/${props.spaceId}/files/upload`, form, {
      headers: { "Content-Type": "multipart/form-data" },
      onUploadProgress: (evt) => {
        // When the total size is unknown, show an indeterminate 50%.
        uploadProgress.value = evt.total ? Math.round((evt.loaded * 100) / evt.total) : 50;
      },
    });
    await loadFiles();
  } catch (e) {
    error.value = e.response?.data || "Upload failed";
  } finally {
    uploading.value = false;
    uploadProgress.value = 0;
  }
};
|
||||
|
||||
// Create a folder under the current prefix, then hide the input and reload.
const createFolder = async () => {
  const name = newFolderName.value.trim();
  if (!name || !props.spaceId) return;
  const fullPath = currentPrefix.value ? `${currentPrefix.value}/${name}` : name;
  error.value = "";
  try {
    await apiClient.post(`/api/v1/spaces/${props.spaceId}/files/folder`, { path: fullPath });
    newFolderName.value = "";
    showNewFolderInput.value = false;
    await loadFiles();
  } catch (e) {
    error.value = e.response?.data || "Failed to create folder";
  }
};
|
||||
|
||||
// Confirm and delete a file or folder, then refresh the listing.
const deleteItem = async (obj) => {
  const label = displayName(obj);
  const warning = obj.is_folder ? "\n\nThis will delete all files inside the folder." : "";
  if (!confirm(`Delete "${label}"?${warning}`)) return;
  error.value = "";
  const base = `/api/v1/spaces/${props.spaceId}/files`;
  try {
    if (obj.is_folder) {
      // Folder prefixes are sent without their trailing slash.
      await apiClient.delete(`${base}/folder`, { params: { prefix: obj.key.replace(/\/$/, "") } });
    } else {
      await apiClient.delete(`${base}/object`, { params: { key: obj.key } });
    }
    await loadFiles();
  } catch (e) {
    error.value = e.response?.data || "Delete failed";
  }
};
|
||||
|
||||
// Last path segment of an object's key; folder keys drop their trailing slash.
const displayName = (obj) => {
  let key = obj.key;
  if (obj.is_folder) key = key.replace(/\/$/, "");
  const segments = key.split("/");
  return segments[segments.length - 1] || key;
};
|
||||
|
||||
// Map a listing entry to an MDI icon class: folders get a folder icon,
// files are grouped by extension, unknown types get a generic outline.
const fileIcon = (obj) => {
  if (obj.is_folder) return "mdi mdi-folder text-warning";
  const groups = [
    [["jpg", "jpeg", "png", "gif", "webp", "svg", "bmp", "avif"], "mdi mdi-file-image text-info"],
    [["pdf"], "mdi mdi-file-pdf-box text-danger"],
    [["doc", "docx", "odt"], "mdi mdi-file-word text-primary"],
    [["xls", "xlsx", "ods"], "mdi mdi-file-excel text-success"],
    [["zip", "tar", "gz", "rar", "7z"], "mdi mdi-folder-zip text-secondary"],
    [["mp4", "mov", "avi", "mkv", "webm"], "mdi mdi-file-video"],
    [["mp3", "wav", "ogg", "flac"], "mdi mdi-file-music"],
    [["js", "ts", "py", "go", "java", "c", "cpp", "rs", "html", "css", "json", "yaml", "yml", "sh"], "mdi mdi-file-code text-success"],
  ];
  const ext = displayName(obj).split(".").pop().toLowerCase();
  for (const [exts, icon] of groups) {
    if (exts.includes(ext)) return icon;
  }
  return "mdi mdi-file-outline text-muted";
};
|
||||
|
||||
/**
 * Format a byte count as a short human-readable size.
 *
 * Fix: the original had no GB tier, so multi-gigabyte objects rendered as
 * thousands of MB (e.g. 2 GiB -> "2048.0 MB"). A GB tier is added; output
 * for values below 1 GiB is unchanged.
 */
const formatSize = (bytes) => {
  if (bytes < 1024) return `${bytes} B`;
  if (bytes < 1048576) return `${(bytes / 1024).toFixed(1)} KB`;
  if (bytes < 1073741824) return `${(bytes / 1048576).toFixed(1)} MB`;
  return `${(bytes / 1073741824).toFixed(1)} GB`;
};
|
||||
|
||||
// Load on mount and when spaceId or prefix changes from parent
watch(
  () => props.spaceId,
  (id) => {
    if (id) loadFiles();
  },
  { immediate: true },
);

watch(
  () => props.modelValue,
  (incoming) => {
    // Only react to external prefix changes; navigateTo already reloads,
    // so skipping equal values avoids a redundant fetch.
    if (incoming !== currentPrefix.value) {
      currentPrefix.value = incoming || "";
      loadFiles();
    }
  },
);

// Focus the folder-name input as soon as it is revealed.
watch(showNewFolderInput, async (visible) => {
  if (!visible) return;
  await nextTick();
  newFolderInputRef.value?.focus();
});
|
||||
</script>
|
||||
|
||||
<style scoped>
|
||||
.file-explorer {
|
||||
background: #fff;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.file-explorer-header {
|
||||
font-size: 0.8rem;
|
||||
min-height: 36px;
|
||||
}
|
||||
|
||||
.file-list {
|
||||
max-height: 480px;
|
||||
}
|
||||
|
||||
.file-item {
|
||||
border-bottom: 1px solid #f0f0f0;
|
||||
cursor: pointer;
|
||||
transition: background 0.1s;
|
||||
color: #333;
|
||||
line-height: 1.3;
|
||||
}
|
||||
|
||||
.file-item:last-child {
|
||||
border-bottom: none;
|
||||
}
|
||||
|
||||
.file-item:hover {
|
||||
background-color: #f0f4ff;
|
||||
}
|
||||
|
||||
.drag-active {
|
||||
outline: 2px dashed #0d6efd;
|
||||
outline-offset: -2px;
|
||||
}
|
||||
|
||||
.btn-delete {
|
||||
opacity: 0;
|
||||
transition: opacity 0.1s;
|
||||
}
|
||||
|
||||
.file-item:hover .btn-delete {
|
||||
opacity: 1;
|
||||
}
|
||||
</style>
|
||||
@@ -4,6 +4,16 @@
|
||||
<button class="btn btn-sm btn-primary" @click="saveNote">Save</button>
|
||||
<button v-if="canDelete" class="btn btn-sm btn-danger ms-2" @click="confirmDelete">Delete</button>
|
||||
<button class="btn btn-sm btn-outline-secondary ms-2" @click="emit('cancel')">Cancel</button>
|
||||
<button
|
||||
v-if="fileExplorerEnabled"
|
||||
class="btn btn-sm ms-2"
|
||||
:class="showFileExplorer ? 'btn-secondary' : 'btn-outline-secondary'"
|
||||
:title="showFileExplorer ? 'Hide file explorer' : 'Browse & insert files'"
|
||||
@click="showFileExplorer = !showFileExplorer"
|
||||
>
|
||||
<i class="mdi mdi-folder-open-outline me-1" aria-hidden="true"></i>
|
||||
Files
|
||||
</button>
|
||||
<span class="save-status ms-auto" :class="saveState">{{ saveStatusLabel }}</span>
|
||||
</div>
|
||||
|
||||
@@ -16,15 +26,19 @@
|
||||
</div>
|
||||
|
||||
<div class="row">
|
||||
<div class="col-12 col-md-6">
|
||||
<textarea v-model="editingNote.content" class="form-control editor-textarea" placeholder="Write your note in markdown..." @input="autoSave"></textarea>
|
||||
<div :class="showFileExplorer ? 'col-12 col-md-5' : 'col-12 col-md-6'">
|
||||
<textarea ref="contentTextareaRef" v-model="editingNote.content" class="form-control editor-textarea" placeholder="Write your note in markdown..." @input="autoSave"></textarea>
|
||||
</div>
|
||||
|
||||
<div class="col-12 col-md-6 mt-3 mt-md-0">
|
||||
<div :class="showFileExplorer ? 'col-12 col-md-4 mt-3 mt-md-0' : 'col-12 col-md-6 mt-3 mt-md-0'">
|
||||
<div class="preview-pane border rounded p-3">
|
||||
<div v-html="renderedMarkdown"></div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div v-if="showFileExplorer" class="col-12 col-md-3 mt-3 mt-md-0">
|
||||
<FileExplorer v-model="fileExplorerPrefix" :space-id="spaceId" @insert="insertAtCursor" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="mt-3">
|
||||
@@ -73,10 +87,13 @@
|
||||
</template>
|
||||
|
||||
<script setup>
|
||||
import { ref, computed, watch, onBeforeUnmount, onMounted } from "vue";
|
||||
import { ref, computed, watch, onBeforeUnmount, onMounted, nextTick } from "vue";
|
||||
import { marked } from "marked";
|
||||
import DOMPurify from "dompurify";
|
||||
import { useSettingsStore } from "../stores/settingsStore";
|
||||
import { useAuthStore } from "../stores/authStore";
|
||||
import { preprocessMarkdown } from "../utils/markdown.js";
|
||||
import FileExplorer from "./FileExplorer.vue";
|
||||
|
||||
const props = defineProps({
|
||||
note: {
|
||||
@@ -91,13 +108,22 @@ const props = defineProps({
|
||||
type: Boolean,
|
||||
default: true,
|
||||
},
|
||||
spaceId: {
|
||||
type: String,
|
||||
default: "",
|
||||
},
|
||||
});
|
||||
|
||||
const emit = defineEmits(["save", "delete", "cancel"]);
|
||||
const settingsStore = useSettingsStore();
|
||||
const authStore = useAuthStore();
|
||||
const publicSharingEnabled = ref(true);
|
||||
const fileExplorerEnabled = computed(() => settingsStore.fileExplorerEnabled);
|
||||
|
||||
const editingNote = ref({ ...props.note });
|
||||
const contentTextareaRef = ref(null);
|
||||
const showFileExplorer = ref(false);
|
||||
const fileExplorerPrefix = ref("");
|
||||
const tagsInput = ref(props.note.tags?.join(", ") || "");
|
||||
const passwordAction = ref("keep");
|
||||
const notePassword = ref("");
|
||||
@@ -106,8 +132,18 @@ const saveState = ref("saved");
|
||||
const saveStateTimeout = ref(null);
|
||||
|
||||
const renderedMarkdown = computed(() => {
|
||||
const html = marked.parse(editingNote.value.content || "");
|
||||
return DOMPurify.sanitize(html);
|
||||
const html = marked.parse(preprocessMarkdown(editingNote.value.content || ""));
|
||||
let clean = DOMPurify.sanitize(html);
|
||||
// Inject access token into space file API URLs so images render without a separate JS fetch
|
||||
const token = authStore.accessToken;
|
||||
if (token && props.spaceId) {
|
||||
clean = clean.replace(/((?:src|href)=["'])([^"']*\/api\/v1\/spaces\/[^"']*\/files\/object[^"']*)(["'])/g, (_, attr, url, quote) => {
|
||||
if (url.includes("token=")) return attr + url + quote;
|
||||
const sep = url.includes("?") ? "&" : "?";
|
||||
return `${attr}${url}${sep}token=${encodeURIComponent(token)}${quote}`;
|
||||
});
|
||||
}
|
||||
return clean;
|
||||
});
|
||||
|
||||
const saveStatusLabel = computed(() => {
|
||||
@@ -197,6 +233,27 @@ const confirmDelete = () => {
|
||||
}
|
||||
};
|
||||
|
||||
/** Insert markdown snippet at the textarea cursor position. */
const insertAtCursor = (snippet) => {
  const el = contentTextareaRef.value;
  if (!el) {
    // No textarea mounted — append to the end as a fallback.
    editingNote.value.content = (editingNote.value.content || "") + snippet;
    autoSave();
    return;
  }
  const content = editingNote.value.content || "";
  const selStart = el.selectionStart ?? content.length;
  const selEnd = el.selectionEnd ?? selStart;
  // Splice the snippet over the current selection (or at the caret).
  editingNote.value.content = content.substring(0, selStart) + snippet + content.substring(selEnd);
  autoSave();
  // After the DOM updates, restore focus and park the caret after the snippet.
  nextTick(() => {
    const caret = selStart + snippet.length;
    el.setSelectionRange(caret, caret);
    el.focus();
  });
};
|
||||
|
||||
onBeforeUnmount(() => {
|
||||
clearTimeout(saveTimeout.value);
|
||||
clearTimeout(saveStateTimeout.value);
|
||||
|
||||
@@ -32,6 +32,8 @@
|
||||
import { computed } from "vue";
|
||||
import { marked } from "marked";
|
||||
import DOMPurify from "dompurify";
|
||||
import { useAuthStore } from "../stores/authStore";
|
||||
import { preprocessMarkdown } from "../utils/markdown.js";
|
||||
|
||||
const props = defineProps({
|
||||
note: {
|
||||
@@ -42,11 +44,26 @@ const props = defineProps({
|
||||
type: Array,
|
||||
default: () => [],
|
||||
},
|
||||
spaceId: {
|
||||
type: String,
|
||||
default: "",
|
||||
},
|
||||
});
|
||||
|
||||
const authStore = useAuthStore();
|
||||
|
||||
const renderedMarkdown = computed(() => {
|
||||
const html = marked.parse(props.note.content || "");
|
||||
return DOMPurify.sanitize(html);
|
||||
const html = marked.parse(preprocessMarkdown(props.note.content || ""));
|
||||
let clean = DOMPurify.sanitize(html);
|
||||
const token = authStore.accessToken;
|
||||
if (token && props.spaceId) {
|
||||
clean = clean.replace(/((?:src|href)=["'])([^"']*\/api\/v1\/spaces\/[^"']*\/files\/object[^"']*)(["'])/g, (_, attr, url, quote) => {
|
||||
if (url.includes("token=")) return attr + url + quote;
|
||||
const sep = url.includes("?") ? "&" : "?";
|
||||
return `${attr}${url}${sep}token=${encodeURIComponent(token)}${quote}`;
|
||||
});
|
||||
}
|
||||
return clean;
|
||||
});
|
||||
|
||||
const categoryLabel = computed(() => {
|
||||
|
||||
@@ -254,6 +254,49 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="feature-flag-item border rounded p-3">
|
||||
<div class="d-flex justify-content-between align-items-center mb-0" :class="{ 'mb-3': featureFlagsForm.file_explorer_enabled }">
|
||||
<div>
|
||||
<div class="fw-semibold">Enable File Explorer</div>
|
||||
<div class="small text-muted">Allow users to browse and insert files from an S3 bucket directly into notes.</div>
|
||||
</div>
|
||||
<div class="form-check form-switch m-0">
|
||||
<input id="flag-file-explorer" v-model="featureFlagsForm.file_explorer_enabled" class="form-check-input" type="checkbox" />
|
||||
</div>
|
||||
</div>
|
||||
<div v-if="featureFlagsForm.file_explorer_enabled" class="row g-2 mt-1">
|
||||
<div class="col-md-6">
|
||||
<label class="form-label small mb-1">S3 Endpoint URL</label>
|
||||
<input v-model="featureFlagsForm.s3_endpoint" type="url" class="form-control form-control-sm" placeholder="https://s3.amazonaws.com or custom endpoint" />
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<label class="form-label small mb-1">Bucket Name</label>
|
||||
<input v-model="featureFlagsForm.s3_bucket" type="text" class="form-control form-control-sm" placeholder="my-bucket" />
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<label class="form-label small mb-1">Region</label>
|
||||
<input v-model="featureFlagsForm.s3_region" type="text" class="form-control form-control-sm" placeholder="us-east-1" />
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<label class="form-label small mb-1">Access Key</label>
|
||||
<input v-model="featureFlagsForm.s3_access_key" type="text" class="form-control form-control-sm" autocomplete="off" />
|
||||
</div>
|
||||
<div class="col-md-4">
|
||||
<label class="form-label small mb-1">Secret Key</label>
|
||||
<input
|
||||
v-model="featureFlagsForm.s3_secret_key"
|
||||
type="password"
|
||||
class="form-control form-control-sm"
|
||||
:placeholder="featureFlagsForm.s3_secret_key_set ? 'Leave blank to keep current secret' : 'Enter secret key'"
|
||||
autocomplete="new-password"
|
||||
/>
|
||||
<div v-if="featureFlagsForm.s3_secret_key_set && !featureFlagsForm.s3_secret_key" class="small text-success mt-1">
|
||||
<i class="mdi mdi-check-circle-outline" aria-hidden="true"></i> Secret key is set
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="d-flex justify-content-end">
|
||||
<button class="btn btn-primary" :disabled="savingFeatureFlags" @click="saveFeatureFlags">
|
||||
{{ savingFeatureFlags ? "Saving..." : "Save Feature Flags" }}
|
||||
@@ -364,6 +407,13 @@ const featureFlagsForm = ref({
|
||||
registration_enabled: true,
|
||||
provider_login_enabled: true,
|
||||
public_sharing_enabled: true,
|
||||
file_explorer_enabled: false,
|
||||
s3_endpoint: "",
|
||||
s3_bucket: "",
|
||||
s3_region: "",
|
||||
s3_access_key: "",
|
||||
s3_secret_key: "",
|
||||
s3_secret_key_set: false,
|
||||
});
|
||||
|
||||
const clearMessages = () => {
|
||||
@@ -584,6 +634,13 @@ const loadFeatureFlags = async () => {
|
||||
registration_enabled: !!res.data.registration_enabled,
|
||||
provider_login_enabled: !!res.data.provider_login_enabled,
|
||||
public_sharing_enabled: !!res.data.public_sharing_enabled,
|
||||
file_explorer_enabled: !!res.data.file_explorer_enabled,
|
||||
s3_endpoint: res.data.s3_endpoint || "",
|
||||
s3_bucket: res.data.s3_bucket || "",
|
||||
s3_region: res.data.s3_region || "",
|
||||
s3_access_key: res.data.s3_access_key || "",
|
||||
s3_secret_key: "", // never pre-fill the secret
|
||||
s3_secret_key_set: !!res.data.s3_secret_key_set,
|
||||
};
|
||||
} catch (e) {
|
||||
error.value = e.response?.data || "Failed to load feature flags.";
|
||||
@@ -596,11 +653,28 @@ const saveFeatureFlags = async () => {
|
||||
savingFeatureFlags.value = true;
|
||||
clearMessages();
|
||||
try {
|
||||
const res = await apiClient.put("/api/v1/admin/feature-flags", featureFlagsForm.value);
|
||||
const res = await apiClient.put("/api/v1/admin/feature-flags", {
|
||||
registration_enabled: featureFlagsForm.value.registration_enabled,
|
||||
provider_login_enabled: featureFlagsForm.value.provider_login_enabled,
|
||||
public_sharing_enabled: featureFlagsForm.value.public_sharing_enabled,
|
||||
file_explorer_enabled: featureFlagsForm.value.file_explorer_enabled,
|
||||
s3_endpoint: featureFlagsForm.value.s3_endpoint,
|
||||
s3_bucket: featureFlagsForm.value.s3_bucket,
|
||||
s3_region: featureFlagsForm.value.s3_region,
|
||||
s3_access_key: featureFlagsForm.value.s3_access_key,
|
||||
s3_secret_key: featureFlagsForm.value.s3_secret_key, // blank = keep existing
|
||||
});
|
||||
featureFlagsForm.value = {
|
||||
registration_enabled: !!res.data.registration_enabled,
|
||||
provider_login_enabled: !!res.data.provider_login_enabled,
|
||||
public_sharing_enabled: !!res.data.public_sharing_enabled,
|
||||
file_explorer_enabled: !!res.data.file_explorer_enabled,
|
||||
s3_endpoint: res.data.s3_endpoint || "",
|
||||
s3_bucket: res.data.s3_bucket || "",
|
||||
s3_region: res.data.s3_region || "",
|
||||
s3_access_key: res.data.s3_access_key || "",
|
||||
s3_secret_key: "",
|
||||
s3_secret_key_set: !!res.data.s3_secret_key_set,
|
||||
};
|
||||
successMessage.value = "Feature flags updated.";
|
||||
} catch (e) {
|
||||
|
||||
@@ -6,6 +6,7 @@ const DEFAULT_FLAGS = {
|
||||
registration_enabled: true,
|
||||
provider_login_enabled: true,
|
||||
public_sharing_enabled: true,
|
||||
file_explorer_enabled: false,
|
||||
};
|
||||
|
||||
export const useSettingsStore = defineStore("settings", () => {
|
||||
@@ -15,6 +16,7 @@ export const useSettingsStore = defineStore("settings", () => {
|
||||
const registrationEnabled = computed(() => !!featureFlags.value.registration_enabled);
|
||||
const providerLoginEnabled = computed(() => !!featureFlags.value.provider_login_enabled);
|
||||
const publicSharingEnabled = computed(() => !!featureFlags.value.public_sharing_enabled);
|
||||
const fileExplorerEnabled = computed(() => !!featureFlags.value.file_explorer_enabled);
|
||||
|
||||
const loadFeatureFlags = async (force = false) => {
|
||||
if (flagsLoaded.value && !force) {
|
||||
@@ -42,6 +44,7 @@ export const useSettingsStore = defineStore("settings", () => {
|
||||
registrationEnabled,
|
||||
providerLoginEnabled,
|
||||
publicSharingEnabled,
|
||||
fileExplorerEnabled,
|
||||
loadFeatureFlags,
|
||||
};
|
||||
});
|
||||
|
||||
29
frontend/src/utils/markdown.js
Normal file
29
frontend/src/utils/markdown.js
Normal file
@@ -0,0 +1,29 @@
|
||||
/**
 * Preprocesses markdown content to support extended image size syntax:
 *
 *   ![alt](url =WIDTHxHEIGHT)
 *   ![alt](url "title" =WIDTHxHEIGHT)
 *
 * WIDTH and HEIGHT are pixel values or percentages (e.g. 50%).
 * Either can be omitted:
 *   =200x → width 200 only
 *   =x150 → height 150 only
 *
 * The syntax is transformed into a plain <img> tag before passing to marked
 * because CommonMark terminates the link destination at whitespace, making it
 * impossible for marked to see the size spec otherwise.
 */
export function preprocessMarkdown(content) {
  if (!content) return content;
  const sizedImage = /!\[([^\]]*)\]\(([^\s)"]+)(?:\s+"([^"]*)")?\s+=(\d*%?)[xX](\d*%?)\)/gi;
  return content.replace(sizedImage, (_, alt, url, title, width, height) => {
    // Escape double quotes so attribute values cannot break out of the tag.
    const esc = (s) => s.replace(/"/g, "&quot;");
    const attrs = [`src="${url}"`, `alt="${esc(alt)}"`];
    if (title) attrs.push(`title="${esc(title)}"`);
    if (width) attrs.push(`width="${width}"`);
    if (height) attrs.push(`height="${height}"`);
    return `<img ${attrs.join(" ")}>`;
  });
}
|
||||
Reference in New Issue
Block a user