diff --git a/README.md b/README.md index 2a9991d..8b6e42a 100644 --- a/README.md +++ b/README.md @@ -61,21 +61,29 @@ From the table, users can download an excel sheet of the job's results, along wi ``` -2. Set environmental variables in `docker-compose.yml`. +2. Set environmental variables and labels in `docker-compose.yml`. -``` +```yaml scraperr: - environment: - - HOSTNAME=localhost # your public domain, or localhost if running locally - - NEXT_PUBLIC_API_PATH=http://scraperr_api.$HOSTNAME # address to api + labels: + - "traefik.enable=true" + - "traefik.http.routers.scraperr.rule=Host(`localhost`)" # change this to your domain, if not running on localhost + - "traefik.http.routers.scraperr.entrypoints=web" # websecure if using https + - "traefik.http.services.scraperr.loadbalancer.server.port=3000" -scraperr_api +scraperr_api: environment: - - HOSTNAME=localhost # needs to be the same as scraperr - MONGODB_URI=mongodb://root:example@webscrape-mongo:27017 # used to access MongoDB - SECRET_KEY=your_secret_key # used to encode authentication tokens (can be a random string) - ALGORITHM=HS256 # authentication encoding algorithm - ACCESS_TOKEN_EXPIRE_MINUTES=600 # access token expire minutes + labels: + - "traefik.enable=true" + - "traefik.http.routers.scraperr_api.rule=Host(`localhost`) && PathPrefix(`/api`)" # change this to your domain, if not running on localhost + - "traefik.http.routers.scraperr_api.entrypoints=web" # websecure if using https + - "traefik.http.middlewares.api-stripprefix.stripprefix.prefixes=/api" + - "traefik.http.routers.scraperr_api.middlewares=api-stripprefix" + - "traefik.http.services.scraperr_api.loadbalancer.server.port=8000" mongo: environment: @@ -83,11 +91,8 @@ mongo: MONGO_INITDB_ROOT_PASSWORD: example ``` -Don't want to use `traefik`? - -Setup your `docker-compose.yml` like this: - - +Don't want to use `traefik`? 
This configuration can be used in other reverse proxies, as long as the API is proxied to `/api` of the frontend container. This is currently +not able to be used without a reverse proxy, due to limitations of runtime client-side environmental variables in `next.js`. 3. Deploy diff --git a/api/backend/app.py b/api/backend/app.py index 834e4cf..a62c0b5 100644 --- a/api/backend/app.py +++ b/api/backend/app.py @@ -27,7 +27,6 @@ from api.backend.job import ( ) from api.backend.models import ( DownloadJob, - GetStatistics, SubmitScrapeJob, DeleteScrapeJobs, UpdateJobs, @@ -55,13 +54,13 @@ app.add_middleware( ) -@app.post("/api/update") +@app.post("/update") async def update(update_jobs: UpdateJobs, user: User = Depends(get_current_user)): """Used to update jobs""" await update_job(update_jobs.ids, update_jobs.field, update_jobs.value) -@app.post("/api/submit-scrape-job") +@app.post("/submit-scrape-job") async def submit_scrape_job(job: SubmitScrapeJob, background_tasks: BackgroundTasks): LOG.info(f"Recieved job: {job}") try: @@ -76,7 +75,7 @@ async def submit_scrape_job(job: SubmitScrapeJob, background_tasks: BackgroundTa return JSONResponse(content={"error": str(e)}, status_code=500) -@app.post("/api/retrieve-scrape-jobs") +@app.post("/retrieve-scrape-jobs") async def retrieve_scrape_jobs(user: User = Depends(get_current_user)): LOG.info(f"Retrieving jobs for account: {user.email}") try: @@ -94,7 +93,7 @@ def clean_text(text: str): return text -@app.post("/api/download") +@app.post("/download") async def download(download_job: DownloadJob): LOG.info(f"Downloading job with ids: {download_job.ids}") try: @@ -162,7 +161,7 @@ async def download(download_job: DownloadJob): return {"error": str(e)} -@app.post("/api/delete-scrape-jobs") +@app.post("/delete-scrape-jobs") async def delete(delete_scrape_jobs: DeleteScrapeJobs): result = await delete_jobs(delete_scrape_jobs.ids) return ( @@ -172,7 +171,7 @@ async def delete(delete_scrape_jobs: DeleteScrapeJobs): ) 
-@app.get("/api/initial_logs") +@app.get("/initial_logs") async def get_initial_logs(): container_id = "scraperr_api" @@ -184,7 +183,7 @@ async def get_initial_logs(): raise HTTPException(status_code=500, detail=f"Unexpected error: {e}") -@app.get("/api/logs") +@app.get("/logs") async def get_own_logs(): container_id = "scraperr_api" @@ -204,12 +203,12 @@ async def get_own_logs(): raise HTTPException(status_code=500, detail=str(e)) -@app.get("/api/statistics/get-average-element-per-link") +@app.get("/statistics/get-average-element-per-link") async def get_average_element_per_link(user: User = Depends(get_current_user)): return await average_elements_per_link(user.email) -@app.get("/api/statistics/get-average-jobs-per-day") +@app.get("/statistics/get-average-jobs-per-day") async def average_jobs_per_day(user: User = Depends(get_current_user)): data = await get_jobs_per_day(user.email) return data diff --git a/api/backend/auth/auth_router.py b/api/backend/auth/auth_router.py index 41a1b9e..ae3ab45 100644 --- a/api/backend/auth/auth_router.py +++ b/api/backend/auth/auth_router.py @@ -19,7 +19,7 @@ from api.backend.auth.auth_utils import ( auth_router = APIRouter() -@auth_router.post("/api/auth/token", response_model=Token) +@auth_router.post("/auth/token", response_model=Token) async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()): user = await authenticate_user(form_data.username, form_data.password) if not user: @@ -41,7 +41,7 @@ async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends( return {"access_token": access_token, "token_type": "bearer"} -@auth_router.post("/api/auth/signup", response_model=User) +@auth_router.post("/auth/signup", response_model=User) async def create_user(user: UserCreate): users_collection = get_user_collection() hashed_password = get_password_hash(user.password) @@ -52,6 +52,6 @@ async def create_user(user: UserCreate): return user_dict -@auth_router.get("/api/auth/users/me", 
response_model=User) +@auth_router.get("/auth/users/me", response_model=User) async def read_users_me(current_user: User = Depends(get_current_user)): return current_user diff --git a/docker-compose.yml b/docker-compose.yml index 27574b1..a275ad0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -6,29 +6,20 @@ services: dockerfile: docker/frontend/Dockerfile container_name: scraperr command: ["npm", "run", "start"] - ports: - - 3000:3000 - environment: - - HOSTNAME=localhost - - NEXT_PUBLIC_API_PATH=http://localhost:8000 labels: - "traefik.enable=true" - - "traefik.http.routers.scraperr.rule=Host(`${HOSTNAME}`)" + - "traefik.http.routers.scraperr.rule=Host(`localhost`)" # change this to your domain, if not running on localhost - "traefik.http.routers.scraperr.entrypoints=web" # websecure if using https - "traefik.http.services.scraperr.loadbalancer.server.port=3000" networks: - web - scraperr_api: init: True image: jpyles0524/scraperr_api:latest build: context: . dockerfile: docker/api/Dockerfile - ports: - - 8000:8000 environment: - - HOSTNAME=localhost - MONGODB_URI=mongodb://root:example@webscrape-mongo:27017 # used to access MongoDB - SECRET_KEY=your_secret_key # used to encode authentication tokens (can be a random string) - ALGORITHM=HS256 # authentication encoding algorithm @@ -38,26 +29,27 @@ services: - /var/run/docker.sock:/var/run/docker.sock labels: - "traefik.enable=true" - - "traefik.http.routers.scraperr_api.rule=Host(`scraperr_api.${HOSTNAME}`)" + - "traefik.http.routers.scraperr_api.rule=Host(`localhost`) && PathPrefix(`/api`)" # change this to your domain, if not running on localhost - "traefik.http.routers.scraperr_api.entrypoints=web" # websecure if using https + - "traefik.http.middlewares.api-stripprefix.stripprefix.prefixes=/api" + - "traefik.http.routers.scraperr_api.middlewares=api-stripprefix" - "traefik.http.services.scraperr_api.loadbalancer.server.port=8000" networks: - web - - # traefik: - # image: traefik:latest - # 
container_name: traefik - # command: - # - "--providers.docker=true" - # - "--entrypoints.web.address=:80" - # - "--entrypoints.websecure.address=:443" - # ports: - # - 80:80 - # - 443:443 - # volumes: - # - /var/run/docker.sock:/var/run/docker.sock:ro" - # networks: - # - web + traefik: + image: traefik:latest + container_name: traefik + command: + - "--providers.docker=true" + - "--entrypoints.web.address=:80" + - "--entrypoints.websecure.address=:443" + ports: + - 80:80 + - 443:443 + volumes: + - /var/run/docker.sock:/var/run/docker.sock:ro + networks: + - web mongo: container_name: webscrape-mongo image: mongo @@ -67,6 +59,5 @@ services: MONGO_INITDB_ROOT_PASSWORD: example networks: - web - networks: web: diff --git a/src/contexts/AuthContext.tsx b/src/contexts/AuthContext.tsx index 7e9c71e..63e0207 100644 --- a/src/contexts/AuthContext.tsx +++ b/src/contexts/AuthContext.tsx @@ -22,7 +22,6 @@ export const AuthProvider: React.FC = ({ children }) => { const [isAuthenticated, setIsAuthenticated] = useState(false); useEffect(() => { - // const token = localStorage.getItem("token"); const token = Cookies.get("token"); if (token) { axios @@ -54,8 +53,6 @@ export const AuthProvider: React.FC = ({ children }) => { secure: false, sameSite: "Lax", }); - // localStorage.setItem("token", response.data.access_token); - console.log(response.data.access_token); const userResponse = await axios.get( `${Constants.DOMAIN}/api/auth/users/me`, { @@ -68,7 +65,6 @@ export const AuthProvider: React.FC = ({ children }) => { const logout = () => { Cookies.remove("token"); - // localStorage.removeItem("token"); setUser(null); setIsAuthenticated(false); }; diff --git a/src/lib/constants.ts b/src/lib/constants.ts index 70e7710..f4c079e 100644 --- a/src/lib/constants.ts +++ b/src/lib/constants.ts @@ -1,3 +1,3 @@ export const Constants = { - DOMAIN: process.env.NEXT_PUBLIC_API_PATH, + DOMAIN: "", }; diff --git a/src/pages/statistics.tsx b/src/pages/statistics.tsx index ba4ab92..11b10c2 
100644 --- a/src/pages/statistics.tsx +++ b/src/pages/statistics.tsx @@ -30,7 +30,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => { if (token) { try { const averageElementResponse = await fetch( - `http://scraperr_api:8000/api/statistics/get-average-element-per-link`, + `http://scraperr_api:8000/statistics/get-average-element-per-link`, { headers: { Authorization: `Bearer ${token}` }, } @@ -39,7 +39,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => { averageElement = await averageElementResponse.json(); const averageJobResponse = await fetch( - `http://scraperr_api:8000/api/statistics/get-average-jobs-per-day`, + `http://scraperr_api:8000/statistics/get-average-jobs-per-day`, { headers: { Authorization: `Bearer ${token}` }, }