
Commit 1e0324f

Feature Updates: User Groups, Favourites, Secondary Location & Bugsnag Integration (#7)
* Feature: User favourites (#2)
* Feature: initial routers / methods for favourites
  - add env documentation / adminer image for easier inspection of db
  - create / delete favourite entry in database
  - fetch list of favourites by user
* Chore: update formatting, tests
* update test methods / logging
* updates
* expand tests / logging
* env updates
* .env updates (#8)
* env updates
* update example env
* initial routes
* update methods
* Add secondary country to signals data (#10)
* initial routes
* update methods
* Fix: Update Python setup action to v4 and fix cache configuration
* Update signal.py
* Collaborative signal editing (user groups etc) (#9)
* full routers / entities / test setup
* update methods
* Delete signal_collaborators.sql

---------

Co-authored-by: amaguire-undp <[email protected]>

* add endpoints to get auth user's groups + signals
* add emails to user group routes
* configure application with bugsnag
* update app configuration with bugsnag

---------

Co-authored-by: amaguire-undp <[email protected]>
1 parent c71ae3a commit 1e0324f


41 files changed: +4016 −40 lines changed

.env.example

Lines changed: 40 additions & 0 deletions
@@ -0,0 +1,40 @@
+# Authentication
+TENANT_ID="<microsoft-entra-tenant-id>"
+CLIENT_ID="<app-id>"
+API_KEY="<strong-password>" # for accessing "public" endpoints
+
+# Database and Storage
+DB_CONNECTION="postgresql://<user>:<password>@<host>:5432/<staging|production>"
+SAS_URL="https://<account-name>.blob.core.windows.net/<container-name>?<sas-token>"
+
+# Azure OpenAI, only required for `/signals/generation`
+AZURE_OPENAI_ENDPOINT="https://<subdomain>.openai.azure.com/"
+AZURE_OPENAI_API_KEY="<api-key>"
+
+# Testing, only required to run tests, must be a valid token of a regular user
+API_JWT="<json-token>"
+# Email Configuration
+MS_FROM_EMAIL=[email protected]
+EMAIL_SERVICE_TYPE=ms_graph
+
+# SendGrid Configuration (if using SendGrid email service)
+SENDGRID_API_KEY=
+SENDGRID_FROM_EMAIL=
+
+# Azure Authentication
+TENANT_ID=
+CLIENT_ID=
+
+# API Authentication
+API_KEY=
+API_JWT=
+
+# Database Connection
+DB_CONNECTION=
+
+# Azure Storage
+SAS_URL=
+
+# Azure OpenAI Configuration
+AZURE_OPENAI_ENDPOINT=
+AZURE_OPENAI_API_KEY=

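The application loads these values with python-dotenv at startup (see main.py further down). A minimal sketch of a fail-fast check against the variables documented above — the REQUIRED_VARS list and the check itself are illustrative and not part of this commit:

# Illustrative startup check (not part of this commit): fail fast when
# variables documented in .env.example are missing from the environment.
import os

from dotenv import load_dotenv

REQUIRED_VARS = ["TENANT_ID", "CLIENT_ID", "API_KEY", "DB_CONNECTION", "SAS_URL"]

load_dotenv()
missing = [name for name in REQUIRED_VARS if not os.environ.get(name)]
if missing:
    raise RuntimeError(f"Missing environment variables: {', '.join(missing)}")
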
.env.local

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+TENANT_ID=
+CLIENT_ID=
+API_KEY=
+
+DB_CONNECTION=
+SAS_URL=
+
+AZURE_OPENAI_ENDPOINT=
+AZURE_OPENAI_API_KEY=
+
+API_JWT=
+
+NEWS_API_KEY=

.github/workflows/azure-webapps-python.yml

Lines changed: 3 additions & 2 deletions
@@ -24,10 +24,11 @@ jobs:
       - uses: actions/checkout@v4
 
       - name: Set up Python version
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: ${{ env.PYTHON_VERSION }}
-          cache: 'pip'
+          cache: pip
+          cache-dependency-path: 'requirements.txt'
 
       - name: Create and start virtual environment
         run: |

.gitignore

Lines changed: 6 additions & 0 deletions
@@ -140,3 +140,9 @@ cython_debug/
 # Manually added for this project
 .idea/
 **/.DS_Store
+create_test_user.sql
+.env.production
+/.prs
+Taskfile.yml
+.env.local
+/.logs

Dockerfile

Lines changed: 19 additions & 3 deletions
@@ -1,10 +1,26 @@
 FROM python:3.11.7-slim
+
+# Install system dependencies
 RUN apt-get update -y \
-    && apt-get install libpq-dev -y \
+    && apt-get install -y \
+    libpq-dev \
+    postgresql-client \
+    curl \
     && rm -rf /var/lib/apt/lists/*
+
 WORKDIR /app
-COPY requirements.txt .
-RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+# Install Python dependencies including development dependencies
+COPY requirements.txt requirements_dev.txt ./
+RUN pip install --no-cache-dir --upgrade -r requirements.txt -r requirements_dev.txt
+
+# Copy application code
 COPY . .
+
 EXPOSE 8000
+
+# Add healthcheck
+HEALTHCHECK --interval=30s --timeout=30s --start-period=5s --retries=3 \
+    CMD curl --fail http://localhost:8000/signals/search || exit 1
+
 CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "8000"]

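The new HEALTHCHECK curls /signals/search inside the container. A rough Python equivalent of that probe for checking a locally published container from the host — it assumes the service is reachable on localhost:8000 and that the endpoint answers without extra credentials, as the curl command implies:

# Rough Python equivalent of the Dockerfile HEALTHCHECK (assumption: the API
# is published on localhost:8000 and /signals/search needs no extra headers).
import sys
import urllib.request

try:
    with urllib.request.urlopen("http://localhost:8000/signals/search", timeout=5) as resp:
        healthy = 200 <= resp.status < 300
except OSError:  # covers URLError/HTTPError and connection failures
    healthy = False

sys.exit(0 if healthy else 1)
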
Makefile

Lines changed: 1 addition & 1 deletion
@@ -5,4 +5,4 @@ format:
 lint:
 	pylint main.py src/
 test:
-	python -m pytest tests/
+	python -m pytest tests/

docker-compose.yaml

Lines changed: 30 additions & 3 deletions
@@ -5,16 +5,43 @@ services:
     env_file: .env
     environment:
       - DB_CONNECTION=postgresql://postgres:password@db:5432/postgres
+      - ENV_MODE=local
     ports:
       - "8000:8000"
+    volumes:
+      - .:/app
     depends_on:
-      - db
+      db:
+        condition: service_healthy
+    command: >
+      sh -c "sleep 5 && uvicorn main:app --host 0.0.0.0 --port 8000 --reload"
   db:
     image: postgres:16.4-alpine
     environment:
       POSTGRES_PASSWORD: password
+      POSTGRES_DB: postgres
     ports:
-      - "5432:5432"
+      - 5432:5432
     volumes:
-      - ./sql:/docker-entrypoint-initdb.d
+      - postgres_data:/var/lib/postgresql/data
+      - ./sql/create_tables.sql:/docker-entrypoint-initdb.d/1-create_tables.sql
+      - ./sql/import_data.sql:/docker-entrypoint-initdb.d/2-import_data.sql
+      - ./sql/init_test_data.sql:/docker-entrypoint-initdb.d/3-init_test_data.sql
+      - ./sql/add_secondary_location.sql:/app/sql/add_secondary_location.sql
       - ./data:/docker-entrypoint-initdb.d/data
+    healthcheck:
+      test: ["CMD-SHELL", "pg_isready -U postgres"]
+      interval: 5s
+      timeout: 5s
+      retries: 5
+  adminer:
+    image: adminer
+    restart: always
+    ports:
+      - 4040:8080
+    depends_on:
+      db:
+        condition: service_healthy
+
+volumes:
+  postgres_data:

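With the stack up (docker compose up), the database is reachable from the host using the defaults above (user postgres, password password, port 5432), and Adminer is exposed at http://localhost:4040 for browsing the same data. A small psycopg (v3) sketch to confirm the init scripts ran — the users table comes from sql/create_tables.sql:

# Quick local check (illustrative): connect with the compose defaults and
# confirm the init scripts created the expected tables.
import psycopg

with psycopg.connect("postgresql://postgres:password@localhost:5432/postgres") as conn:
    (count,) = conn.execute("SELECT count(*) FROM users").fetchone()
    print(f"users rows: {count}")
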
main.py

Lines changed: 90 additions & 1 deletion
@@ -3,13 +3,31 @@
 the frontend platform with the backend database.
 """
 
+import os
+import logging
+import datetime
 from dotenv import load_dotenv
-from fastapi import Depends, FastAPI
+from fastapi import Depends, FastAPI, Request
+from fastapi.middleware.cors import CORSMiddleware
+from fastapi.responses import JSONResponse
 
 from src import routers
 from src.authentication import authenticate_user
+from src.config.logging_config import setup_logging
+from src.bugsnag_config import configure_bugsnag, setup_bugsnag_logging, get_bugsnag_middleware, BUGSNAG_ENABLED
 
+# Load environment variables and set up logging
 load_dotenv()
+setup_logging()
+
+# Get application version
+app_version = os.environ.get("RELEASE_VERSION", "dev")
+app_env = os.environ.get("ENVIRONMENT", "development")
+logging.info(f"Starting application - version: {app_version}, environment: {app_env}")
+
+# Configure Bugsnag for error tracking
+configure_bugsnag()
+setup_bugsnag_logging()
 
 app = FastAPI(
     debug=False,
@@ -42,11 +60,82 @@
         {"name": "trends", "description": "CRUD operations on trends."},
         {"name": "users", "description": "CRUD operations on users."},
         {"name": "choices", "description": "List valid options for forms fields."},
+        {"name": "favourites", "description": "Manage user's favorite signals."},
     ],
     docs_url="/",
     redoc_url=None,
 )
 
+# Add global exception handler to report errors to Bugsnag
+@app.exception_handler(Exception)
+async def global_exception_handler(request: Request, exc: Exception):
+    logging.error(f"Unhandled exception: {str(exc)}", exc_info=True)
+
+    if BUGSNAG_ENABLED:
+        import bugsnag
+        bugsnag.notify(
+            exc,
+            metadata={
+                "request": {
+                    "url": str(request.url),
+                    "method": request.method,
+                    "headers": dict(request.headers),
+                    "client": request.client.host if request.client else None,
+                }
+            }
+        )
+
+    return JSONResponse(
+        status_code=500,
+        content={"detail": "Internal server error"},
+    )
+
+# allow cors
+app.add_middleware(
+    CORSMiddleware,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
+# Add Bugsnag exception handling middleware
+# Important: Add middleware AFTER registering exception handlers
+bugsnag_app = get_bugsnag_middleware(app)
 
 for router in routers.ALL:
     app.include_router(router=router, dependencies=[Depends(authenticate_user)])
+
+# Add diagnostic endpoint for health checks and Bugsnag verification
+@app.get("/_health", include_in_schema=False)
+async def health_check():
+    """Health check endpoint that also shows the current environment and version."""
+    return {
+        "status": "ok",
+        "environment": app_env,
+        "version": app_version,
+        "bugsnag_enabled": BUGSNAG_ENABLED
+    }
+
+# Test endpoint to trigger a test error report to Bugsnag if enabled
+@app.get("/_test-error", include_in_schema=False)
+async def test_error():
+    """Trigger a test error to verify Bugsnag is working."""
+    if BUGSNAG_ENABLED:
+        import bugsnag
+        bugsnag.notify(
+            Exception("Test error triggered via /_test-error endpoint"),
+            metadata={
+                "test_info": {
+                    "environment": app_env,
+                    "version": app_version,
+                    "timestamp": str(datetime.datetime.now())
+                }
+            }
+        )
+        return {"status": "error_reported", "message": "Test error sent to Bugsnag"}
+    else:
+        return {"status": "disabled", "message": "Bugsnag is not enabled"}
+
+# Use the Bugsnag middleware wrapped app for ASGI
+app = bugsnag_app

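main.py now imports configure_bugsnag, setup_bugsnag_logging, get_bugsnag_middleware and BUGSNAG_ENABLED from src.bugsnag_config, which is not shown in this commit view. A hypothetical sketch of what such a module could look like, inferred only from its usage above and the public bugsnag package API — the BUGSNAG_API_KEY variable name and all implementation details are assumptions, not the repository's actual code:

# Hypothetical reconstruction of src/bugsnag_config.py, inferred from its usage
# in main.py. The real module may differ; BUGSNAG_API_KEY is an assumed name.
import logging
import os

import bugsnag
from bugsnag.asgi import BugsnagMiddleware
from bugsnag.handlers import BugsnagHandler

BUGSNAG_ENABLED = bool(os.environ.get("BUGSNAG_API_KEY"))


def configure_bugsnag():
    """Configure the Bugsnag client when an API key is present."""
    if BUGSNAG_ENABLED:
        bugsnag.configure(
            api_key=os.environ["BUGSNAG_API_KEY"],
            release_stage=os.environ.get("ENVIRONMENT", "development"),
            app_version=os.environ.get("RELEASE_VERSION", "dev"),
        )


def setup_bugsnag_logging():
    """Forward ERROR-level log records to Bugsnag."""
    if BUGSNAG_ENABLED:
        handler = BugsnagHandler()
        handler.setLevel(logging.ERROR)
        logging.getLogger().addHandler(handler)


def get_bugsnag_middleware(app):
    """Wrap the ASGI app so unhandled exceptions are reported."""
    return BugsnagMiddleware(app) if BUGSNAG_ENABLED else app

Once deployed, GET /_health should report "bugsnag_enabled": true, and GET /_test-error should produce a test event in the Bugsnag dashboard.
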
requirements.txt

Lines changed: 1 addition & 0 deletions
@@ -14,3 +14,4 @@ pillow ~= 11.0.0
 beautifulsoup4 ~= 4.12.3
 lxml ~= 5.3.0
 openai == 1.52.2
+bugsnag>=4.0.0

requirements_dev.txt

Lines changed: 3 additions & 0 deletions
@@ -4,3 +4,6 @@ isort ~= 5.13.2
 pylint ~= 3.3.1
 pytest ~= 8.3.3
 notebook ~= 7.2.2
+pytest-asyncio==0.21.1
+pytest-cov==4.1.0
+pytest-watch==4.2.0

setup.py

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+from setuptools import find_packages, setup
+
+setup(
+    name="ftss-api",
+    version="0.1",
+    packages=find_packages(),
+    install_requires=[
+        "fastapi",
+        "uvicorn",
+        "psycopg",
+        "pydantic",
+    ],
+)

sql/add_secondary_location.sql

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+/*
+Migration script to add secondary_location column to signals table.
+Run this script to update the database schema.
+*/
+
+-- Add secondary_location column to signals table
+ALTER TABLE signals ADD COLUMN IF NOT EXISTS secondary_location TEXT[];
+
+-- Update the index to include the new column
+DROP INDEX IF EXISTS signals_idx;
+CREATE INDEX ON signals (
+    status,
+    created_by,
+    created_for,
+    created_unit,
+    steep_primary,
+    steep_secondary,
+    signature_primary,
+    signature_secondary,
+    sdgs,
+    location,
+    secondary_location,
+    score
+);

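docker-compose.yaml mounts this script into the API container at /app/sql/add_secondary_location.sql rather than into the Postgres init directory, so it is meant to be applied against an existing database. An illustrative way to apply it with psycopg (v3) using the local compose defaults — the connection string is an assumption for local use:

# Illustrative: apply the migration to an already-initialised local database.
import psycopg

with open("sql/add_secondary_location.sql", encoding="utf-8") as fh:
    migration = fh.read()

with psycopg.connect("postgresql://postgres:password@localhost:5432/postgres") as conn:
    # With no bound parameters, psycopg sends the script via the simple query
    # protocol, so the multiple statements in the file run in one call; the
    # connection context manager commits on successful exit.
    conn.execute(migration)
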
sql/create_tables.sql

Lines changed: 17 additions & 2 deletions
@@ -24,11 +24,13 @@ CREATE TABLE users (
     role VARCHAR(255) NOT NULL,
     name VARCHAR(255),
     unit VARCHAR(255),
-    acclab BOOLEAN
+    acclab BOOLEAN,
+    api_key VARCHAR(255) UNIQUE
 );
 
 CREATE INDEX ON users (email);
 CREATE INDEX ON users (role);
+CREATE INDEX ON users (api_key);
 
 -- signals table and indices
 CREATE TABLE signals (
@@ -118,6 +120,19 @@ CREATE TABLE connections (
     CONSTRAINT connection_pk PRIMARY KEY (signal_id, trend_id)
 );
 
+-- favourites table to track user's favourite signals
+CREATE TABLE favourites (
+    user_id INT REFERENCES users(id) ON DELETE CASCADE,
+    signal_id INT REFERENCES signals(id) ON DELETE CASCADE,
+    created_at TIMESTAMP NOT NULL DEFAULT NOW(),
+    CONSTRAINT favourites_pk PRIMARY KEY (user_id, signal_id)
+);
+
+CREATE INDEX ON favourites (user_id, created_at);
+
+CREATE INDEX favourites_user_signal_idx ON favourites (user_id, signal_id);
+CREATE INDEX favourites_created_at_idx ON favourites (created_at DESC);
+
 -- locations table and indices
 CREATE TABLE locations (
     id SERIAL PRIMARY KEY,
@@ -134,4 +149,4 @@ CREATE TABLE units (
     name TEXT NOT NULL,
     region VARCHAR(255)
 );
-CREATE INDEX ON units (name, region);
+CREATE INDEX ON units (name, region);

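The favourites table backs the new favourites routes (create, delete and list per user). A psycopg (v3) sketch of the queries this schema supports — the function names and connection string are illustrative, not the repository's actual router or CRUD code:

# Illustrative favourites operations against the schema above (not the
# repository's actual CRUD layer). Connection string matches the local
# docker-compose defaults.
import psycopg

DB_CONNECTION = "postgresql://postgres:password@localhost:5432/postgres"


def add_favourite(user_id: int, signal_id: int) -> None:
    with psycopg.connect(DB_CONNECTION) as conn:
        conn.execute(
            "INSERT INTO favourites (user_id, signal_id) VALUES (%s, %s)"
            " ON CONFLICT DO NOTHING",
            (user_id, signal_id),
        )


def remove_favourite(user_id: int, signal_id: int) -> None:
    with psycopg.connect(DB_CONNECTION) as conn:
        conn.execute(
            "DELETE FROM favourites WHERE user_id = %s AND signal_id = %s",
            (user_id, signal_id),
        )


def list_favourites(user_id: int) -> list[int]:
    with psycopg.connect(DB_CONNECTION) as conn:
        rows = conn.execute(
            "SELECT signal_id FROM favourites WHERE user_id = %s"
            " ORDER BY created_at DESC",
            (user_id,),
        ).fetchall()
    return [signal_id for (signal_id,) in rows]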