Skip to content

Commit f040aea

Browse files
committed
update: 💾 minio support added
1 parent c03dfcd commit f040aea

12 files changed

Lines changed: 297 additions & 59 deletions

File tree

Dockerfile renamed to api.Dockerfile

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,11 +3,11 @@ LABEL authors="jiisanda"
33

44
WORKDIR /usr/src/app
55

6-
COPY requirements.txt ./
6+
COPY requirements/api.txt ./
77

88
RUN pip install --upgrade pip
9-
RUN pip install --no-cache-dir -r requirements.txt
9+
RUN pip install --no-cache-dir -r api.txt  # path matches the renamed requirements file copied above
1010

1111
COPY . .
1212

13-
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
13+
CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

app/api/dependencies/repositories.py

Lines changed: 23 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,7 @@
11
import os.path
22
import re
3+
from typing import Optional
4+
35
import ulid
46

57
from fastapi import Depends
@@ -35,20 +37,30 @@ def _get_repository(session: AsyncSession = Depends(get_db)):
3537

3638

3739
async def get_s3_url(key: str) -> str:
40+
if settings.s3_endpoint_url:
41+
# minio URL format
42+
return f"{settings.s3_endpoint_url}/{settings.s3_bucket}/{key}"
3843
return f"https://{settings.s3_bucket}.s3.{settings.aws_region}.amazonaws.com/{key}"
3944

4045

41-
async def get_key(s3_url: str) -> str:
42-
43-
pattern = (
44-
f"https://{settings.s3_bucket}"
45-
+ r"\.s3\."
46-
+ settings.aws_region
47-
+ r"\.amazonaws\.com/"
48-
+ r"(.+)"
49-
)
50-
if match := re.search(pattern, s3_url):
51-
return match[1]
46+
async def get_key(s3_url: str) -> Optional[str]:
47+
if settings.s3_endpoint_url:
48+
# minio url format: http://host:9000/bucket/key
49+
# remove the endpoint and bucket from the URL
50+
url_without_endpoint = s3_url.replace(settings.s3_endpoint_url, "")
51+
url_without_bucket = url_without_endpoint.replace(f"/{settings.s3_bucket}/", "")
52+
return url_without_bucket.lstrip("/")
53+
else:
54+
pattern = (
55+
f"https://{settings.s3_bucket}"
56+
+ r"\.s3\."
57+
+ settings.aws_region
58+
+ r"\.amazonaws\.com/"
59+
+ r"(.+)"
60+
)
61+
if match := re.search(pattern, s3_url):
62+
return match[1]
63+
return None
5264

5365

5466
def get_ulid():

app/core/config.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,9 +26,11 @@ class GlobalConfig(BaseSettings):
2626
postgres_port: int = int(os.environ.get("POSTGRES_PORT"))
2727
postgres_db: str = os.environ.get("POSTGRES_DB")
2828
db_echo_log: bool = str(os.environ.get("DEBUG", "False")).lower() == "true"
29+
# s3 / minio configurations
2930
aws_access_key_id: str = os.environ.get("AWS_ACCESS_KEY_ID")
3031
aws_secret_key: str = os.environ.get("AWS_SECRET_ACCESS_KEY")
31-
aws_region: str = os.environ.get("AWS_REGION")
32+
aws_region: str = os.environ.get("AWS_REGION", "us-east-1")  # MinIO ignores the region; default keeps boto3 happy
33+
s3_endpoint_url: str = os.environ.get("S3_ENDPOINT_URL")
3234
s3_bucket: str = os.environ.get("S3_BUCKET")
3335
s3_test_bucket: str = os.environ.get("S3_TEST_BUCKET")
3436
# user config

app/db/repositories/documents/documents.py

Lines changed: 20 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -34,12 +34,25 @@ async def perm_delete(
3434
class DocumentRepository:
3535

3636
def __init__(self):
37-
self.s3_client = boto3.resource("s3")
38-
self.client = boto3.client("s3")
37+
boto3_config = {
38+
"aws_access_key_id": settings.aws_access_key_id,
39+
"aws_secret_access_key": settings.aws_secret_key,
40+
"region_name": settings.aws_region,
41+
}
42+
43+
if settings.s3_endpoint_url:
44+
boto3_config["endpoint_url"] = settings.s3_endpoint_url
45+
46+
self.s3_client = boto3.resource("s3", **boto3_config)
47+
self.client = boto3.client("s3", **boto3_config)
3948
self.s3_bucket = self.s3_client.Bucket(settings.s3_bucket)
40-
self.client.put_bucket_versioning(
41-
Bucket=settings.s3_bucket, VersioningConfiguration={"Status": "Enabled"}
42-
)
49+
try:
50+
self.client.put_bucket_versioning(
51+
Bucket=settings.s3_bucket, VersioningConfiguration={"Status": "Enabled"}
52+
)
53+
except Exception as e:
54+
# Minio does not support versioning in all configurations
55+
...
4356

4457
@staticmethod
4558
async def _calculate_file_hash(file: File) -> str:
@@ -72,7 +85,7 @@ async def _upload_new_file(
7285
else:
7386
key = f"{user.id}/{folder}/{str(ULID())}.{SUPPORTED_FILE_TYPES[file_type]}"
7487

75-
self.s3_bucket.put_object(Bucket=settings.s3_bucket, Key=key, Body=contents)
88+
self.s3_bucket.put_object(Key=key, Body=contents)
7689

7790
return {
7891
"response": "file_added",
@@ -98,7 +111,7 @@ async def _upload_new_version(
98111

99112
key = await get_key(s3_url=doc["s3_url"])
100113

101-
self.s3_bucket.put_object(Bucket=settings.s3_bucket, Key=key, Body=contents)
114+
self.s3_bucket.put_object(Key=key, Body=contents)
102115

103116
return {
104117
"response": "file_updated",

app/logs/logger.py

Lines changed: 105 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -1,63 +1,142 @@
1-
from os.path import join, abspath, dirname
1+
import os
22
import logging
33
import logging.config
4-
4+
from pathlib import Path
55

66
LOGGER_NAME: str = "docflow"
77
LOG_FORMAT: str = (
88
"%(asctime)s [%(levelname)s] | %(name)s | %(filename)s | %(funcName)s | %(lineno)d | %(message)s"
99
)
1010
LOG_LEVEL: int = logging.DEBUG
1111

12-
BASE_DIR = abspath(dirname(__file__))
1312

14-
LOG_FILE: str = join(BASE_DIR, "docflow.log")
13+
def get_log_file_path():
14+
"""
15+
Get a writable log file path, trying multiple locations.
16+
Returns None if no writable location is found.
17+
"""
18+
possible_locations = [
19+
"/usr/src/app/logs/docflow.log",
20+
"/app/logs/docflow.log",
21+
"/tmp/docflow.log",
22+
"docflow.log",
23+
]
24+
25+
for log_path in possible_locations:
26+
try:
27+
log_file = Path(log_path)
28+
log_file.parent.mkdir(parents=True, exist_ok=True)
29+
30+
test_write = log_file.parent / f"test_write_{os.getpid()}.tmp"
31+
test_write.touch()
32+
test_write.unlink()
33+
34+
return str(log_file)
35+
except (OSError, PermissionError):
36+
continue
37+
38+
return None
39+
40+
41+
LOG_FILE = get_log_file_path()
1542

1643
LOGGING = {
1744
"version": 1,
18-
"disable_existing_logger": False,
45+
"disable_existing_loggers": False,
1946
"formatters": {
2047
"standard": {
2148
"format": LOG_FORMAT,
2249
"datefmt": "%Y-%m-%d %H:%M:%S",
2350
},
51+
"console": {
52+
"format": "%(asctime)s [%(levelname)s] | %(name)s | %(message)s",
53+
"datefmt": "%Y-%m-%d %H:%M:%S",
54+
},
2455
},
2556
"handlers": {
26-
"default": {
57+
"console": {
2758
"level": "INFO",
28-
"formatter": "standard",
59+
"formatter": "console",
2960
"class": "logging.StreamHandler",
30-
"stream": "ext://sys.stderr",
61+
"stream": "ext://sys.stdout",
3162
},
32-
"file": {
33-
"class": "logging.handlers.RotatingFileHandler",
63+
"error_console": {
64+
"level": "ERROR",
3465
"formatter": "standard",
35-
"level": "DEBUG",
36-
"filename": "docflow.log",
37-
"mode": "a",
38-
"encoding": "utf-8",
39-
"maxBytes": 500000,
40-
"backupCount": 4,
66+
"class": "logging.StreamHandler",
67+
"stream": "ext://sys.stderr",
4168
},
4269
},
4370
"loggers": {
44-
"": {"handlers": ["default"], "level": "INFO", "propagate": True},
71+
"": {
72+
"handlers": ["console"],
73+
"level": "INFO",
74+
"propagate": False
75+
},
4576
LOGGER_NAME: {
46-
"handlers": ["default", "file"],
77+
"handlers": ["console", "error_console"],
4778
"level": LOG_LEVEL,
4879
"propagate": False,
4980
},
50-
"sqlalchemy": {"handlers": ["file"], "level": "WARNING"},
51-
"s3": {"handlers": ["file"], "level": "WARNING"},
52-
"uvicorn.error": {"level": "INFO", "handlers": ["default"], "propagate": False},
53-
"uvicorn.access": {"level": "INFO", "handlers": ["default"], "propagate": True},
54-
"uvicorn.asgi": {"level": "INFO", "handlers": ["default"], "propagate": True},
81+
"sqlalchemy": {
82+
"handlers": ["console"],
83+
"level": "WARNING",
84+
"propagate": False,
85+
},
86+
"s3": {
87+
"handlers": ["console"],
88+
"level": "WARNING",
89+
"propagate": False,
90+
},
91+
"uvicorn.error": {
92+
"level": "INFO",
93+
"handlers": ["console"],
94+
"propagate": False
95+
},
96+
"uvicorn.access": {
97+
"level": "INFO",
98+
"handlers": ["console"],
99+
"propagate": False
100+
},
101+
"uvicorn.asgi": {
102+
"level": "INFO",
103+
"handlers": ["console"],
104+
"propagate": False
105+
},
55106
},
56107
}
57108

58-
logging.config.dictConfig(LOGGING)
109+
if LOG_FILE:
110+
LOGGING["handlers"]["file"] = {
111+
"class": "logging.handlers.RotatingFileHandler",
112+
"formatter": "standard",
113+
"level": "DEBUG",
114+
"filename": LOG_FILE,
115+
"mode": "a",
116+
"encoding": "utf-8",
117+
"maxBytes": 500000,
118+
"backupCount": 4,
119+
}
59120

60-
# loggers
61-
docflow_logger = logging.getLogger("docflow")
121+
LOGGING["loggers"][LOGGER_NAME]["handlers"].append("file")
122+
LOGGING["loggers"]["sqlalchemy"]["handlers"].append("file")
123+
LOGGING["loggers"]["s3"]["handlers"].append("file")
124+
125+
try:
126+
logging.config.dictConfig(LOGGING)
127+
except Exception as e:
128+
logging.basicConfig(
129+
level=LOG_LEVEL,
130+
format=LOG_FORMAT,
131+
handlers=[logging.StreamHandler()]
132+
)
133+
print(f"Warning: Failed to configure logging: {e}")
134+
135+
docflow_logger = logging.getLogger(LOGGER_NAME)
62136
s3_logger = logging.getLogger("s3")
63137
sqlalchemy_logger = logging.getLogger("sqlalchemy")
138+
139+
if LOG_FILE:
140+
docflow_logger.info(f"File logging enabled: {LOG_FILE}")
141+
else:
142+
docflow_logger.warning("File logging disabled - no writable location found")

app/main.py

Lines changed: 31 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,36 @@
1+
from contextlib import asynccontextmanager
2+
13
from fastapi import FastAPI
24
from fastapi.responses import FileResponse
35

46
from app.api.router import router
57
from app.core.config import settings
68
from app.db.models import check_tables
9+
from app.logs.logger import docflow_logger
10+
from app.scripts.init_bucket import create_bucket_if_not_exists
11+
712

13+
@asynccontextmanager
14+
async def lifespan(app: FastAPI):
15+
docflow_logger.info("Starting DocFlow...")
16+
17+
try:
18+
docflow_logger.info("Initializing Tables and Storage buckets...")
19+
await check_tables()
20+
await create_bucket_if_not_exists()
21+
docflow_logger.info("Tables and Storage buckets successfully created.")
22+
except Exception as e:
23+
docflow_logger.error(f"Error during startup: {e}")
24+
raise
25+
yield
826

927
app = FastAPI(
1028
title=settings.title,
1129
version=settings.version,
1230
description=settings.description,
1331
docs_url=settings.docs_url,
1432
openapi_url=settings.openapi_url,
33+
lifespan=lifespan,
1534
)
1635

1736
app.include_router(router=router, prefix=settings.api_prefix)
@@ -28,10 +47,18 @@ async def favicon():
2847
@app.get("/", tags=["Default"])
2948
async def root():
3049
return {
31-
"API": "Document Management API... Docker's up!!! is it? or not... Yes it is!!!"
50+
"API": "DocFlow - Document Management API is running! 🚀",
51+
"version": settings.version,
52+
"docs": f"{settings.host_url}{settings.docs_url}",
53+
"storage": "MinIO" if settings.s3_endpoint_url else "AWS S3"
3254
}
3355

3456

35-
@app.on_event("startup")
36-
async def app_startup() -> None:
37-
return await check_tables()
57+
@app.get("/health", tags=["Default"])
58+
async def health_check():
59+
"""Health check endpoint"""
60+
return {
61+
"status": "healthy",
62+
"service": "DocFlow API",
63+
"version": settings.version
64+
}

app/scripts/init-minio.py

Whitespace-only changes.

0 commit comments

Comments
 (0)