wip: finalize implementation of SSR

Jayden
2024-07-28 10:24:33 -05:00
parent 92b388de00
commit b9dfd56f0a
7 changed files with 50 additions and 59 deletions

View File

@@ -61,21 +61,29 @@ From the table, users can download an excel sheet of the job's results, along wi
```
-2. Set environmental variables in `docker-compose.yml`.
+2. Set environment variables and labels in `docker-compose.yml`.
-```
+```yaml
scraperr:
  environment:
    - HOSTNAME=localhost # your public domain, or localhost if running locally
    - NEXT_PUBLIC_API_PATH=http://scraperr_api.$HOSTNAME # address to api
  labels:
    - "traefik.enable=true"
    - "traefik.http.routers.scraperr.rule=Host(`localhost`)" # change this to your domain, if not running on localhost
    - "traefik.http.routers.scraperr.entrypoints=web" # websecure if using https
    - "traefik.http.services.scraperr.loadbalancer.server.port=3000"
-scraperr_api
+scraperr_api:
  environment:
    - HOSTNAME=localhost # needs to be the same as scraperr
    - MONGODB_URI=mongodb://root:example@webscrape-mongo:27017 # used to access MongoDB
    - SECRET_KEY=your_secret_key # used to encode authentication tokens (can be a random string)
    - ALGORITHM=HS256 # authentication encoding algorithm
    - ACCESS_TOKEN_EXPIRE_MINUTES=600 # access token expire minutes
  labels:
    - "traefik.enable=true"
    - "traefik.http.routers.scraperr_api.rule=Host(`localhost`) && PathPrefix(`/api`)" # change this to your domain, if not running on localhost
    - "traefik.http.routers.scraperr_api.entrypoints=web" # websecure if using https
    - "traefik.http.middlewares.api-stripprefix.stripprefix.prefixes=/api"
    - "traefik.http.routers.scraperr_api.middlewares=api-stripprefix"
    - "traefik.http.services.scraperr_api.loadbalancer.server.port=8000"
mongo:
  environment:
@@ -83,11 +91,8 @@ mongo:
    MONGO_INITDB_ROOT_PASSWORD: example
```
-Don't want to use `traefik`?
-Setup your `docker-compose.yml` like this:
+Don't want to use `traefik`? This configuration also works with other reverse proxies, as long as the API is proxied to `/api` of the frontend container (see the `nginx` sketch after these steps).
+Scraperr currently cannot run without a reverse proxy, due to limitations of runtime client-side environment variables in `next.js`.
3. Deploy
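For reference, a minimal `nginx` equivalent of the Traefik labels above might look like the sketch below. This is an untested assumption, not part of the project: it presumes nginx joins the same Docker network as the `scraperr` and `scraperr_api` containers, and it relies on the trailing slash in `proxy_pass` to strip the `/api` prefix the same way the `stripprefix` middleware does.

```nginx
# Unofficial sketch -- assumes nginx is attached to the same Docker
# network ("web") as the scraperr containers.
server {
    listen 80;
    server_name localhost; # change this to your domain

    # Forward /api/* to the API container. The trailing slash on
    # proxy_pass drops the /api prefix, mirroring Traefik's
    # stripprefix middleware.
    location /api/ {
        proxy_pass http://scraperr_api:8000/;
        proxy_set_header Host $host;
    }

    # Everything else goes to the next.js frontend.
    location / {
        proxy_pass http://scraperr:3000;
        proxy_set_header Host $host;
    }
}
```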

View File

@@ -27,7 +27,6 @@ from api.backend.job import (
)
from api.backend.models import (
    DownloadJob,
-    GetStatistics,
    SubmitScrapeJob,
    DeleteScrapeJobs,
    UpdateJobs,
@@ -55,13 +54,13 @@ app.add_middleware(
)
@app.post("/api/update")
@app.post("/update")
async def update(update_jobs: UpdateJobs, user: User = Depends(get_current_user)):
"""Used to update jobs"""
await update_job(update_jobs.ids, update_jobs.field, update_jobs.value)
@app.post("/api/submit-scrape-job")
@app.post("/submit-scrape-job")
async def submit_scrape_job(job: SubmitScrapeJob, background_tasks: BackgroundTasks):
LOG.info(f"Recieved job: {job}")
try:
@@ -76,7 +75,7 @@ async def submit_scrape_job(job: SubmitScrapeJob, background_tasks: BackgroundTa
        return JSONResponse(content={"error": str(e)}, status_code=500)
@app.post("/api/retrieve-scrape-jobs")
@app.post("/retrieve-scrape-jobs")
async def retrieve_scrape_jobs(user: User = Depends(get_current_user)):
LOG.info(f"Retrieving jobs for account: {user.email}")
try:
@@ -94,7 +93,7 @@ def clean_text(text: str):
    return text
@app.post("/api/download")
@app.post("/download")
async def download(download_job: DownloadJob):
LOG.info(f"Downloading job with ids: {download_job.ids}")
try:
@@ -162,7 +161,7 @@ async def download(download_job: DownloadJob):
return {"error": str(e)}
@app.post("/api/delete-scrape-jobs")
@app.post("/delete-scrape-jobs")
async def delete(delete_scrape_jobs: DeleteScrapeJobs):
result = await delete_jobs(delete_scrape_jobs.ids)
return (
@@ -172,7 +171,7 @@ async def delete(delete_scrape_jobs: DeleteScrapeJobs):
    )
@app.get("/api/initial_logs")
@app.get("/initial_logs")
async def get_initial_logs():
container_id = "scraperr_api"
@@ -184,7 +183,7 @@ async def get_initial_logs():
raise HTTPException(status_code=500, detail=f"Unexpected error: {e}")
@app.get("/api/logs")
@app.get("/logs")
async def get_own_logs():
container_id = "scraperr_api"
@@ -204,12 +203,12 @@ async def get_own_logs():
        raise HTTPException(status_code=500, detail=str(e))
@app.get("/api/statistics/get-average-element-per-link")
@app.get("/statistics/get-average-element-per-link")
async def get_average_element_per_link(user: User = Depends(get_current_user)):
return await average_elements_per_link(user.email)
@app.get("/api/statistics/get-average-jobs-per-day")
@app.get("/statistics/get-average-jobs-per-day")
async def average_jobs_per_day(user: User = Depends(get_current_user)):
data = await get_jobs_per_day(user.email)
return data

View File

@@ -19,7 +19,7 @@ from api.backend.auth.auth_utils import (
auth_router = APIRouter()
@auth_router.post("/api/auth/token", response_model=Token)
@auth_router.post("/auth/token", response_model=Token)
async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
user = await authenticate_user(form_data.username, form_data.password)
if not user:
@@ -41,7 +41,7 @@ async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends(
return {"access_token": access_token, "token_type": "bearer"}
@auth_router.post("/api/auth/signup", response_model=User)
@auth_router.post("/auth/signup", response_model=User)
async def create_user(user: UserCreate):
users_collection = get_user_collection()
hashed_password = get_password_hash(user.password)
@@ -52,6 +52,6 @@ async def create_user(user: UserCreate):
    return user_dict
@auth_router.get("/api/auth/users/me", response_model=User)
@auth_router.get("/auth/users/me", response_model=User)
async def read_users_me(current_user: User = Depends(get_current_user)):
return current_user

View File

@@ -6,29 +6,20 @@ services:
      dockerfile: docker/frontend/Dockerfile
    container_name: scraperr
    command: ["npm", "run", "start"]
    ports:
      - 3000:3000
    environment:
      - HOSTNAME=localhost
      - NEXT_PUBLIC_API_PATH=http://localhost:8000
    labels:
      - "traefik.enable=true"
-      - "traefik.http.routers.scraperr.rule=Host(`${HOSTNAME}`)"
+      - "traefik.http.routers.scraperr.rule=Host(`localhost`)" # change this to your domain, if not running on localhost
      - "traefik.http.routers.scraperr.entrypoints=web" # websecure if using https
      - "traefik.http.services.scraperr.loadbalancer.server.port=3000"
    networks:
      - web
  scraperr_api:
    init: True
    image: jpyles0524/scraperr_api:latest
    build:
      context: .
      dockerfile: docker/api/Dockerfile
    ports:
      - 8000:8000
    environment:
      - HOSTNAME=localhost
      - MONGODB_URI=mongodb://root:example@webscrape-mongo:27017 # used to access MongoDB
      - SECRET_KEY=your_secret_key # used to encode authentication tokens (can be a random string)
      - ALGORITHM=HS256 # authentication encoding algorithm
@@ -38,26 +29,27 @@ services:
      - /var/run/docker.sock:/var/run/docker.sock
    labels:
      - "traefik.enable=true"
-      - "traefik.http.routers.scraperr_api.rule=Host(`scraperr_api.${HOSTNAME}`)"
+      - "traefik.http.routers.scraperr_api.rule=Host(`localhost`) && PathPrefix(`/api`)" # change this to your domain, if not running on localhost
      - "traefik.http.routers.scraperr_api.entrypoints=web" # websecure if using https
      - "traefik.http.middlewares.api-stripprefix.stripprefix.prefixes=/api"
      - "traefik.http.routers.scraperr_api.middlewares=api-stripprefix"
      - "traefik.http.services.scraperr_api.loadbalancer.server.port=8000"
    networks:
      - web
-  # traefik:
-  #   image: traefik:latest
-  #   container_name: traefik
-  #   command:
-  #     - "--providers.docker=true"
-  #     - "--entrypoints.web.address=:80"
-  #     - "--entrypoints.websecure.address=:443"
-  #   ports:
-  #     - 80:80
-  #     - 443:443
-  #   volumes:
-  #     - /var/run/docker.sock:/var/run/docker.sock:ro"
-  #   networks:
-  #     - web
+  traefik:
+    image: traefik:latest
+    container_name: traefik
+    command:
+      - "--providers.docker=true"
+      - "--entrypoints.web.address=:80"
+      - "--entrypoints.websecure.address=:443"
+    ports:
+      - 80:80
+      - 443:443
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock:ro
+    networks:
+      - web
  mongo:
    container_name: webscrape-mongo
    image: mongo
@@ -67,6 +59,5 @@ services:
      MONGO_INITDB_ROOT_PASSWORD: example
    networks:
      - web

networks:
  web:

View File

@@ -22,7 +22,6 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
  const [isAuthenticated, setIsAuthenticated] = useState<boolean>(false);

  useEffect(() => {
-    // const token = localStorage.getItem("token");
    const token = Cookies.get("token");
    if (token) {
      axios
@@ -54,8 +53,6 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
      secure: false,
      sameSite: "Lax",
    });
-    // localStorage.setItem("token", response.data.access_token);
-    console.log(response.data.access_token);
    const userResponse = await axios.get(
      `${Constants.DOMAIN}/api/auth/users/me`,
      {
@@ -68,7 +65,6 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
  const logout = () => {
    Cookies.remove("token");
-    // localStorage.removeItem("token");
    setUser(null);
    setIsAuthenticated(false);
  };

View File

@@ -1,3 +1,3 @@
export const Constants = {
-  DOMAIN: process.env.NEXT_PUBLIC_API_PATH,
+  DOMAIN: "",
};
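Setting `DOMAIN` to an empty string is what makes the reverse proxy mandatory: every browser request is now built as a relative, same-origin URL, and the proxy decides where `/api` traffic goes. A minimal sketch of the effect (the `fetchJobs` helper and import path are hypothetical, for illustration only):

```typescript
import axios from "axios";

import { Constants } from "./constants"; // import path assumed

// With Constants.DOMAIN === "", the URL below resolves to the relative
// path "/api/retrieve-scrape-jobs"; the reverse proxy strips "/api" and
// forwards the request to scraperr_api:8000/retrieve-scrape-jobs.
// fetchJobs is a hypothetical helper, not part of the codebase.
export const fetchJobs = async (token: string) =>
  axios.post(`${Constants.DOMAIN}/api/retrieve-scrape-jobs`, null, {
    headers: { Authorization: `Bearer ${token}` },
  });
```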

View File

@@ -30,7 +30,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
  if (token) {
    try {
      const averageElementResponse = await fetch(
-        `http://scraperr_api:8000/api/statistics/get-average-element-per-link`,
+        `http://scraperr_api:8000/statistics/get-average-element-per-link`,
        {
          headers: { Authorization: `Bearer ${token}` },
        }
@@ -39,7 +39,7 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
      averageElement = await averageElementResponse.json();

      const averageJobResponse = await fetch(
-        `http://scraperr_api:8000/api/statistics/get-average-jobs-per-day`,
+        `http://scraperr_api:8000/statistics/get-average-jobs-per-day`,
        {
          headers: { Authorization: `Bearer ${token}` },
        }
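Taken together with the `constants.ts` change, the commit settles on two URL spaces: in the browser, requests use relative `/api/...` paths and pass through the reverse proxy, while `getServerSideProps` talks to the API container directly over the internal Docker network, where no `/api` prefix exists. A hypothetical helper capturing that convention (not part of the codebase):

```typescript
// Sketch only: picks the right base URL depending on where the code runs.
export const apiUrl = (path: string): string =>
  typeof window === "undefined"
    ? `http://scraperr_api:8000${path}` // SSR: internal Docker DNS name
    : `/api${path}`; // browser: same-origin, prefix stripped by the proxy

// e.g. apiUrl("/statistics/get-average-jobs-per-day")
```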