wip: finalize implementation of SSR

Jayden
2024-07-28 10:24:33 -05:00
parent 92b388de00
commit b9dfd56f0a
7 changed files with 50 additions and 59 deletions

View File

@@ -61,21 +61,29 @@ From the table, users can download an excel sheet of the job's results, along wi
 ```
-2. Set environmental variables in `docker-compose.yml`.
-```
+2. Set environment variables and labels in `docker-compose.yml`.
+```yaml
 scraperr:
-  environment:
-    - HOSTNAME=localhost # your public domain, or localhost if running locally
-    - NEXT_PUBLIC_API_PATH=http://scraperr_api.$HOSTNAME # address to api
+  labels:
+    - "traefik.enable=true"
+    - "traefik.http.routers.scraperr.rule=Host(`localhost`)" # change this to your domain, if not running on localhost
+    - "traefik.http.routers.scraperr.entrypoints=web" # websecure if using https
+    - "traefik.http.services.scraperr.loadbalancer.server.port=3000"

-scraperr_api
+scraperr_api:
   environment:
-    - HOSTNAME=localhost # needs to be the same as scraperr
     - MONGODB_URI=mongodb://root:example@webscrape-mongo:27017 # used to access MongoDB
     - SECRET_KEY=your_secret_key # used to encode authentication tokens (can be a random string)
     - ALGORITHM=HS256 # authentication encoding algorithm
     - ACCESS_TOKEN_EXPIRE_MINUTES=600 # access token expire minutes
+  labels:
+    - "traefik.enable=true"
+    - "traefik.http.routers.scraperr_api.rule=Host(`localhost`) && PathPrefix(`/api`)" # change this to your domain, if not running on localhost
+    - "traefik.http.routers.scraperr_api.entrypoints=web" # websecure if using https
+    - "traefik.http.middlewares.api-stripprefix.stripprefix.prefixes=/api"
+    - "traefik.http.routers.scraperr_api.middlewares=api-stripprefix"
+    - "traefik.http.services.scraperr_api.loadbalancer.server.port=8000"

 mongo:
   environment:
@@ -83,11 +91,8 @@ mongo:
     MONGO_INITDB_ROOT_PASSWORD: example
 ```
-Don't want to use `traefik`?
-
-Setup your `docker-compose.yml` like this:
+Don't want to use `traefik`? This configuration can be adapted to other reverse proxies, as long as the API is proxied to `/api` of the frontend container. Scraperr cannot currently run without a reverse proxy, due to limitations of runtime client-side environment variables in `next.js`.

 3. Deploy
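
To make the new routing concrete: a browser request to `http://localhost/api/retrieve-scrape-jobs` matches the `scraperr_api` router's `PathPrefix(/api)` rule, the `api-stripprefix` middleware removes the `/api` prefix, and traefik forwards `/retrieve-scrape-jobs` to the API container on port 8000. That stripped prefix is why the FastAPI routes in the next file drop `/api` from their paths.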

View File

@@ -27,7 +27,6 @@ from api.backend.job import (
 )
 from api.backend.models import (
     DownloadJob,
-    GetStatistics,
     SubmitScrapeJob,
     DeleteScrapeJobs,
     UpdateJobs,
@@ -55,13 +54,13 @@ app.add_middleware(
 )

-@app.post("/api/update")
+@app.post("/update")
 async def update(update_jobs: UpdateJobs, user: User = Depends(get_current_user)):
     """Used to update jobs"""
     await update_job(update_jobs.ids, update_jobs.field, update_jobs.value)

-@app.post("/api/submit-scrape-job")
+@app.post("/submit-scrape-job")
 async def submit_scrape_job(job: SubmitScrapeJob, background_tasks: BackgroundTasks):
     LOG.info(f"Received job: {job}")
     try:
@@ -76,7 +75,7 @@ async def submit_scrape_job(job: SubmitScrapeJob, background_tasks: BackgroundTa
         return JSONResponse(content={"error": str(e)}, status_code=500)

-@app.post("/api/retrieve-scrape-jobs")
+@app.post("/retrieve-scrape-jobs")
 async def retrieve_scrape_jobs(user: User = Depends(get_current_user)):
     LOG.info(f"Retrieving jobs for account: {user.email}")
     try:
@@ -94,7 +93,7 @@ def clean_text(text: str):
     return text

-@app.post("/api/download")
+@app.post("/download")
 async def download(download_job: DownloadJob):
     LOG.info(f"Downloading job with ids: {download_job.ids}")
     try:
@@ -162,7 +161,7 @@ async def download(download_job: DownloadJob):
return {"error": str(e)} return {"error": str(e)}
@app.post("/api/delete-scrape-jobs") @app.post("/delete-scrape-jobs")
async def delete(delete_scrape_jobs: DeleteScrapeJobs): async def delete(delete_scrape_jobs: DeleteScrapeJobs):
result = await delete_jobs(delete_scrape_jobs.ids) result = await delete_jobs(delete_scrape_jobs.ids)
return ( return (
@@ -172,7 +171,7 @@ async def delete(delete_scrape_jobs: DeleteScrapeJobs):
     )

-@app.get("/api/initial_logs")
+@app.get("/initial_logs")
 async def get_initial_logs():
     container_id = "scraperr_api"
@@ -184,7 +183,7 @@ async def get_initial_logs():
         raise HTTPException(status_code=500, detail=f"Unexpected error: {e}")

-@app.get("/api/logs")
+@app.get("/logs")
 async def get_own_logs():
     container_id = "scraperr_api"
@@ -204,12 +203,12 @@ async def get_own_logs():
         raise HTTPException(status_code=500, detail=str(e))

-@app.get("/api/statistics/get-average-element-per-link")
+@app.get("/statistics/get-average-element-per-link")
 async def get_average_element_per_link(user: User = Depends(get_current_user)):
     return await average_elements_per_link(user.email)

-@app.get("/api/statistics/get-average-jobs-per-day")
+@app.get("/statistics/get-average-jobs-per-day")
 async def average_jobs_per_day(user: User = Depends(get_current_user)):
     data = await get_jobs_per_day(user.email)
     return data

View File

@@ -19,7 +19,7 @@ from api.backend.auth.auth_utils import (
 auth_router = APIRouter()

-@auth_router.post("/api/auth/token", response_model=Token)
+@auth_router.post("/auth/token", response_model=Token)
 async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
     user = await authenticate_user(form_data.username, form_data.password)
     if not user:
@@ -41,7 +41,7 @@ async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends(
return {"access_token": access_token, "token_type": "bearer"} return {"access_token": access_token, "token_type": "bearer"}
@auth_router.post("/api/auth/signup", response_model=User) @auth_router.post("/auth/signup", response_model=User)
async def create_user(user: UserCreate): async def create_user(user: UserCreate):
users_collection = get_user_collection() users_collection = get_user_collection()
hashed_password = get_password_hash(user.password) hashed_password = get_password_hash(user.password)
@@ -52,6 +52,6 @@ async def create_user(user: UserCreate):
     return user_dict

-@auth_router.get("/api/auth/users/me", response_model=User)
+@auth_router.get("/auth/users/me", response_model=User)
 async def read_users_me(current_user: User = Depends(get_current_user)):
     return current_user
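
For illustration, a minimal client-side login sketch against the relocated endpoint (the `login` helper is hypothetical, not part of this commit): the browser posts to the relative path `/api/auth/token`, traefik strips `/api`, and `auth_router` serves it at `/auth/token`.

```typescript
import axios from "axios";

// Hypothetical helper sketching a login call through the /api prefix.
// OAuth2PasswordRequestForm on the server expects form-encoded
// username/password fields, so we send URLSearchParams, not JSON.
export const login = async (username: string, password: string): Promise<string> => {
  const form = new URLSearchParams({ username, password });
  const response = await axios.post("/api/auth/token", form);
  // Response shape comes from login_for_access_token above:
  // { access_token, token_type: "bearer" }
  return response.data.access_token;
};
```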

View File

@@ -6,29 +6,20 @@ services:
       dockerfile: docker/frontend/Dockerfile
     container_name: scraperr
     command: ["npm", "run", "start"]
-    ports:
-      - 3000:3000
-    environment:
-      - HOSTNAME=localhost
-      - NEXT_PUBLIC_API_PATH=http://localhost:8000
     labels:
       - "traefik.enable=true"
-      - "traefik.http.routers.scraperr.rule=Host(`${HOSTNAME}`)"
+      - "traefik.http.routers.scraperr.rule=Host(`localhost`)" # change this to your domain, if not running on localhost
       - "traefik.http.routers.scraperr.entrypoints=web" # websecure if using https
       - "traefik.http.services.scraperr.loadbalancer.server.port=3000"
     networks:
       - web
   scraperr_api:
     init: True
     image: jpyles0524/scraperr_api:latest
     build:
       context: .
       dockerfile: docker/api/Dockerfile
-    ports:
-      - 8000:8000
     environment:
-      - HOSTNAME=localhost
       - MONGODB_URI=mongodb://root:example@webscrape-mongo:27017 # used to access MongoDB
       - SECRET_KEY=your_secret_key # used to encode authentication tokens (can be a random string)
       - ALGORITHM=HS256 # authentication encoding algorithm
@@ -38,26 +29,27 @@ services:
       - /var/run/docker.sock:/var/run/docker.sock
     labels:
       - "traefik.enable=true"
-      - "traefik.http.routers.scraperr_api.rule=Host(`scraperr_api.${HOSTNAME}`)"
+      - "traefik.http.routers.scraperr_api.rule=Host(`localhost`) && PathPrefix(`/api`)" # change this to your domain, if not running on localhost
       - "traefik.http.routers.scraperr_api.entrypoints=web" # websecure if using https
+      - "traefik.http.middlewares.api-stripprefix.stripprefix.prefixes=/api"
+      - "traefik.http.routers.scraperr_api.middlewares=api-stripprefix"
       - "traefik.http.services.scraperr_api.loadbalancer.server.port=8000"
     networks:
       - web
-  # traefik:
-  #   image: traefik:latest
-  #   container_name: traefik
-  #   command:
-  #     - "--providers.docker=true"
-  #     - "--entrypoints.web.address=:80"
-  #     - "--entrypoints.websecure.address=:443"
-  #   ports:
-  #     - 80:80
-  #     - 443:443
-  #   volumes:
-  #     - /var/run/docker.sock:/var/run/docker.sock:ro"
-  #   networks:
-  #     - web
+  traefik:
+    image: traefik:latest
+    container_name: traefik
+    command:
+      - "--providers.docker=true"
+      - "--entrypoints.web.address=:80"
+      - "--entrypoints.websecure.address=:443"
+    ports:
+      - 80:80
+      - 443:443
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+    networks:
+      - web
   mongo:
     container_name: webscrape-mongo
     image: mongo
@@ -67,6 +59,5 @@ services:
       MONGO_INITDB_ROOT_PASSWORD: example
     networks:
       - web
 networks:
   web:
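
With the `traefik` service now uncommented, a plain `docker compose up -d` brings up the proxy, frontend, API, and mongo together on the shared `web` network; traefik's published ports 80/443 replace the per-service `ports:` mappings removed above.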

View File

@@ -22,7 +22,6 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
   const [isAuthenticated, setIsAuthenticated] = useState<boolean>(false);

   useEffect(() => {
-    // const token = localStorage.getItem("token");
     const token = Cookies.get("token");
     if (token) {
       axios
@@ -54,8 +53,6 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
         secure: false,
         sameSite: "Lax",
       });
-      // localStorage.setItem("token", response.data.access_token);
-      console.log(response.data.access_token);
       const userResponse = await axios.get(
         `${Constants.DOMAIN}/api/auth/users/me`,
         {
@@ -68,7 +65,6 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
   const logout = () => {
     Cookies.remove("token");
-    // localStorage.removeItem("token");
     setUser(null);
     setIsAuthenticated(false);
   };

View File

@@ -1,3 +1,3 @@
 export const Constants = {
-  DOMAIN: process.env.NEXT_PUBLIC_API_PATH,
+  DOMAIN: "",
 };
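
With `DOMAIN` now an empty string, client requests resolve as relative URLs against the page's own origin, so one build works behind whatever hostname the reverse proxy serves. A minimal sketch of the effect (helper name and import path are illustrative only):

```typescript
import axios from "axios";
import { Constants } from "./constants"; // import path assumed for illustration

// With Constants.DOMAIN === "", the template below yields the relative URL
// "/api/auth/users/me". The browser resolves it against the current origin
// (e.g. http://localhost), where traefik's /api router forwards it to the API.
export const fetchCurrentUser = (token: string) =>
  axios.get(`${Constants.DOMAIN}/api/auth/users/me`, {
    headers: { Authorization: `Bearer ${token}` },
  });
```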

View File

@@ -30,7 +30,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
   if (token) {
     try {
       const averageElementResponse = await fetch(
-        `http://scraperr_api:8000/api/statistics/get-average-element-per-link`,
+        `http://scraperr_api:8000/statistics/get-average-element-per-link`,
         {
           headers: { Authorization: `Bearer ${token}` },
         }
@@ -39,7 +39,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
       averageElement = await averageElementResponse.json();

       const averageJobResponse = await fetch(
-        `http://scraperr_api:8000/api/statistics/get-average-jobs-per-day`,
+        `http://scraperr_api:8000/statistics/get-average-jobs-per-day`,
         {
           headers: { Authorization: `Bearer ${token}` },
         }
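
Server-side rendering is the counterpart to the client changes above: inside `getServerSideProps`, Next.js talks to the API container directly over the compose network by service name, bypassing traefik, so the internal routes carry no `/api` prefix. A condensed sketch of this page's data flow (error handling simplified):

```typescript
import type { GetServerSideProps } from "next";

// Condensed sketch of the SSR fetch pattern in this file.
export const getServerSideProps: GetServerSideProps = async (context) => {
  const token = context.req.cookies?.token; // cookie set by AuthProvider on login
  let averageElement = null;

  if (token) {
    // Server-side request: use the compose service name and the internal
    // route directly, since this call never passes through traefik.
    const res = await fetch(
      "http://scraperr_api:8000/statistics/get-average-element-per-link",
      { headers: { Authorization: `Bearer ${token}` } }
    );
    if (res.ok) averageElement = await res.json();
  }

  return { props: { averageElement } };
};
```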