wip: separate frontend from backend

This commit is contained in:
Jayden
2024-07-23 20:53:15 -05:00
parent 0704c5ae15
commit 4d31adcca9
12 changed files with 77 additions and 35 deletions

View File

@@ -2,25 +2,35 @@ name: ci
on:
push:
branches: ["master"]
# pull_request:
# branches: ["master"]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Login to Docker Hub
uses: docker/login-action@v3
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3
- name: Build and push
- name: Build and push frontend
uses: docker/build-push-action@v5
with:
context: .
file: ./Dockerfile
file: ./docker/frontend/Dockerfile
push: true
tags: ${{ secrets.DOCKERHUB_USERNAME }}/${{secrets.DOCKERHUB_REPO}}:latest
tags: ${{ secrets.DOCKERHUB_USERNAME }}/${{ secrets.DOCKERHUB_REPO }}:latest # Tag for the first image
- name: Build and push api
uses: docker/build-push-action@v5
with:
context: .
file: ./docker/api/Dockerfile
push: true
tags: ${{ secrets.DOCKERHUB_USERNAME }}/scraperr_api:latest # Tag for the second image

1
.gitignore vendored
View File

@@ -185,3 +185,4 @@ cython_debug/
.pdm-python
.next
postgres_data
.vscode

View File

@@ -1,3 +0,0 @@
{
"python.languageServer": "None"
}

View File

@@ -186,7 +186,7 @@ async def delete(delete_scrape_jobs: DeleteScrapeJobs):
@app.get("/api/logs")
async def get_own_logs():
container_id = "scraperr"
container_id = "scraperr_api"
try:
container = client.containers.get(container_id)
log_stream = container.logs(stream=True, follow=True)

View File

@@ -69,8 +69,7 @@ def create_driver():
chrome_options.add_argument("--disable-dev-shm-usage")
chrome_options.add_argument(f"user-agent={ua.random}")
service = Service(ChromeDriverManager().install())
return webdriver.Chrome(options=chrome_options, service=service)
return webdriver.Chrome(options=chrome_options)
async def make_site_request(

View File

@@ -3,12 +3,13 @@ services:
scraperr:
labels:
- "traefik.enable=true"
- "traefik.http.routers.frontend.rule=Host(`${HOSTNAME_DEV}`)"
- "traefik.http.routers.frontend.entrypoints=web"
- "traefik.http.services.frontend.loadbalancer.server.port=8000"
- "traefik.http.routers.frontend.tls=false"
- "traefik.http.routers.scraperr.rule=Host(`${HOSTNAME_DEV}`)"
- "traefik.http.routers.scraperr.entrypoints=web"
- "traefik.http.services.scraperr.loadbalancer.server.port=3000"
- "traefik.http.routers.scraperr.tls=false"
volumes:
- "$PWD/dist:/project/dist"
- "$PWD/src:/project/src"
- "$PWD/api/backend:/project/api/backend"
ports:
- "8000:8000"
- "3000:3000"

View File

@@ -1,24 +1,39 @@
services:
scraperr:
image: jpyles0524/scraperr:latest
init: True
build:
context: ./
context: .
dockerfile: docker/frontend/Dockerfile
container_name: scraperr
ports:
- 9000:8000
env_file:
- ./.env
labels:
- "traefik.enable=true"
- "traefik.http.routers.scraperr.rule=Host(`${HOSTNAME}`)"
- "traefik.http.routers.scraperr.entrypoints=web" # websecure if using https
- "traefik.http.services.scraperr.loadbalancer.server.port=3000"
networks:
- web
scraperr_api:
init: True
image: jpyles0524/scraperr_api:latest
build:
context: .
dockerfile: docker/api/Dockerfile
container_name: scraperr_api
env_file:
- ./.env
volumes:
- /var/run/docker.sock:/var/run/docker.sock
labels:
- "traefik.enable=true"
- "traefik.http.routers.frontend.rule=Host(`${HOSTNAME}`)"
- "traefik.http.routers.frontend.entrypoints=web" # websecure if using https
- "traefik.http.services.frontend.loadbalancer.server.port=8000"
# - "traefik.http.routers.frontend.tls=true"
- "traefik.http.routers.scraperr_api.rule=Host(`scraperr_api.${HOSTNAME}`)"
- "traefik.http.routers.scraperr_api.entrypoints=web" # websecure if using https
- "traefik.http.services.scraperr_api.loadbalancer.server.port=8000"
networks:
- web
traefik:
image: traefik:latest
container_name: traefik
@@ -26,15 +41,11 @@ services:
- "--providers.docker=true"
- "--entrypoints.web.address=:80"
- "--entrypoints.websecure.address=:443"
# - "--providers.file.filename=/etc/traefik/dynamic_conf.yaml"
ports:
- 80:80
- 443:443
volumes:
- "/var/run/docker.sock:/var/run/docker.sock:ro"
# - "./dynamic_conf.yaml:/etc/traefik/dynamic_conf.yaml"
# - "/etc/letsencrypt/live/domain/fullchain.pem:/etc/certs/ssl-cert.pem"
# - "/etc/letsencrypt/live/domain/privkey.pem:/etc/certs/ssl-cert.key"
- "/var/run/docker.sock:/var/run/docker.sock:ro"
networks:
- web
mongo:
@@ -46,5 +57,6 @@ services:
MONGO_INITDB_ROOT_PASSWORD: example
networks:
- web
networks:
web:

View File

@@ -51,4 +51,3 @@ EXPOSE 8000
WORKDIR /project/
CMD [ "supervisord", "-c", "/etc/supervisor/conf.d/supervisord.conf" ]

View File

@@ -0,0 +1,19 @@
# Build next dependencies
FROM node:latest
WORKDIR /app
COPY package*.json ./
RUN npm install
COPY public /app/public
COPY src /app/src
COPY tsconfig.json /app/tsconfig.json
COPY tailwind.config.js /app/tailwind.config.js
COPY next.config.mjs /app/next.config.mjs
COPY postcss.config.js /app/postcss.config.js
RUN npm run build
EXPOSE 3000
CMD [ "npm", "run", "start" ]

View File

@@ -1,10 +1,12 @@
import dotenv from "dotenv";
dotenv.config();
/** @type {import('next').NextConfig} */
const nextConfig = {
// output: "export",
distDir: "./dist",
images: { unoptimized: true },
env: {
DOMAIN: "http://localhost:8000",
DOMAIN: `${process.env.NEXT_PUBLIC_API_PATH}`,
},
};

View File

@@ -1,3 +1,4 @@
export const Constants = {
DOMAIN: process.env.DOMAIN,
// DOMAIN: process.env.NEXT_PUBLIC_API_PATH,
DOMAIN: "http://scraperr_api.localhost",
};

View File

@@ -1,13 +1,14 @@
import { Container, IconButton } from "@mui/material";
import { ArrowUpward, ArrowDownward } from "@mui/icons-material";
import { useEffect, useRef, useState } from "react";
import { Constants } from "../lib";
const Logs = () => {
const [logs, setLogs] = useState("");
const logsContainerRef = useRef<HTMLDivElement>(null);
useEffect(() => {
const eventSource = new EventSource("/api/logs");
const eventSource = new EventSource(`${Constants.DOMAIN}/api/logs`);
eventSource.onmessage = (event) => {
setLogs((prevLogs) => prevLogs + event.data + "\n");