fix: make calls to next server

This commit is contained in:
Jayden Pyles
2024-11-12 21:34:47 -06:00
parent b3bf780eda
commit dc4d219205
26 changed files with 654 additions and 105 deletions

View File

@@ -7,7 +7,7 @@ on:
jobs:
build:
if: ${{ github.event.workflow_run.conclusion == 'success' && github.ref == 'refs/heads/master' }}
if: ${{ github.event.workflow_run.conclusion == 'success' && github.ref == 'refs/heads/master' && github.event_name != 'pull_request' }}
runs-on: ubuntu-latest
steps:
- name: Checkout

View File

@@ -64,10 +64,31 @@ async def average_elements_per_link(user: str):
collection = get_job_collection()
pipeline = [
{"$match": {"status": "Completed", "user": user}},
{
"$addFields": {
"time_created_date": {
"$cond": {
"if": {"$eq": [{"$type": "$time_created"}, "date"]},
"then": "$time_created",
"else": {
"$convert": {
"input": "$time_created",
"to": "date",
"onError": None,
"onNull": None,
}
},
}
}
}
},
{
"$project": {
"date": {
"$dateToString": {"format": "%Y-%m-%d", "date": "$time_created"}
"$dateToString": {
"format": "%Y-%m-%d",
"date": "$time_created_date",
}
},
"num_elements": {"$size": "$elements"},
}
@@ -100,10 +121,31 @@ async def get_jobs_per_day(user: str):
collection = get_job_collection()
pipeline = [
{"$match": {"status": "Completed", "user": user}},
{
"$addFields": {
"time_created_date": {
"$cond": {
"if": {"$eq": [{"$type": "$time_created"}, "date"]},
"then": "$time_created",
"else": {
"$convert": {
"input": "$time_created",
"to": "date",
"onError": None,
"onNull": None,
}
},
}
}
}
},
{
"$project": {
"date": {
"$dateToString": {"format": "%Y-%m-%d", "date": "$time_created"}
"$dateToString": {
"format": "%Y-%m-%d",
"date": "$time_created_date",
}
}
}
},

View File

@@ -1,5 +1,7 @@
services:
scraperr:
depends_on:
- scraperr_api
image: jpyles0524/scraperr:latest
build:
context: .
@@ -7,7 +9,7 @@ services:
container_name: scraperr
command: ["npm", "run", "start"]
environment:
- NEXT_PUBLIC_API_URL=http://localhost:8000 # your API URL
- NEXT_PUBLIC_API_URL=http://scraperr_api:8000 # your API URL
- SERVER_URL=http://scraperr_api:8000 # your docker container API URL
ports:
- 80:3000

View File

@@ -48,14 +48,11 @@ export const JobTable: React.FC<JobTableProps> = ({ jobs, setJobs }) => {
const router = useRouter();
const handleDownload = async (ids: string[]) => {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/download`,
{
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ ids: ids }),
}
);
const response = await fetch("/api/download", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ data: { ids: ids } }),
});
if (response.ok) {
const blob = await response.blob();
@@ -107,14 +104,11 @@ export const JobTable: React.FC<JobTableProps> = ({ jobs, setJobs }) => {
};
const handleDeleteSelected = async () => {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/delete-scrape-jobs`,
{
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ ids: Array.from(selectedJobs) }),
}
);
const response = await fetch("/api/delete", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({ data: { ids: Array.from(selectedJobs) } }),
});
if (response.ok) {
setJobs((jobs) =>
@@ -148,13 +142,13 @@ export const JobTable: React.FC<JobTableProps> = ({ jobs, setJobs }) => {
value: value,
};
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/update`, {
await fetch("/api/update", {
method: "POST",
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
body: JSON.stringify(postBody),
body: JSON.stringify({ data: postBody }),
});
};

View File

@@ -14,19 +14,24 @@ export const LogContainer: React.FC<LogContainerProps> = ({ initialLogs }) => {
const logsContainerRef = useRef<HTMLDivElement | null>(null);
useEffect(() => {
const eventSource = new EventSource(`${Constants.DOMAIN}/api/logs`);
const eventSource = new EventSource(`/api/logs`);
setLogs("");
eventSource.onmessage = (event) => {
setLogs((prevLogs) => prevLogs + event.data + "\n");
if (logsContainerRef.current) {
logsContainerRef.current.scrollTop =
logsContainerRef.current.scrollHeight;
}
};
eventSource.onerror = () => {
eventSource.onopen = (e) => {
};
eventSource.onerror = (error) => {
console.error("EventSource failed:", error);
eventSource.close();
};

View File

@@ -25,7 +25,7 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
const token = Cookies.get("token");
if (token) {
axios
.get(`${process.env.NEXT_PUBLIC_API_URL}/api/auth/users/me`, {
.get(`/api/me`, {
headers: { Authorization: `Bearer ${token}` },
})
.then((response) => {
@@ -42,10 +42,8 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
const params = new URLSearchParams();
params.append("username", email);
params.append("password", password);
const response = await axios.post(
`${process.env.NEXT_PUBLIC_API_URL}/api/auth/token`,
params
);
const response = await axios.post(`/api/token`, params);
Cookies.set("token", response.data.access_token, {
expires: 7,
path: "/",
@@ -53,12 +51,10 @@ export const AuthProvider: React.FC<AuthProps> = ({ children }) => {
secure: false,
sameSite: "Lax",
});
const userResponse = await axios.get(
`${process.env.NEXT_PUBLIC_API_URL}/api/auth/users/me`,
{
headers: { Authorization: `Bearer ${response.data.access_token}` },
}
);
const userResponse = await axios.get(`/api/me`, {
headers: { Authorization: `Bearer ${response.data.access_token}` },
});
setUser(userResponse.data);
setIsAuthenticated(true);
};

View File

@@ -11,13 +11,13 @@ export const fetchJobs = async (
fetchOptions: fetchOptions = {}
) => {
const token = Cookies.get("token");
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/retrieve-scrape-jobs`, {
await fetch("/api/retrieve", {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
body: JSON.stringify(fetchOptions),
body: JSON.stringify({ data: fetchOptions }),
})
.then((response) => response.json())
.then((data) => setJobs(data))
@@ -29,15 +29,12 @@ export const fetchJobs = async (
export const fetchJob = async (id: string) => {
const token = Cookies.get("token");
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/job/${id}`,
{
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
}
);
const response = await fetch(`/api/job/${id}`, {
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
});
const data = await response.json();
return data;
} catch (error) {
@@ -51,15 +48,12 @@ export const checkAI = async (
) => {
const token = Cookies.get("token");
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/ai/check`,
{
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
}
);
const response = await fetch("/api/ai/check", {
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
});
const data = await response.json();
setAiEnabled(data);
} catch (error) {
@@ -75,13 +69,13 @@ export const updateJob = async (ids: string[], field: string, value: any) => {
field: field,
value: value,
};
await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/update`, {
await fetch("/api/update", {
method: "POST",
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
body: JSON.stringify(postBody),
body: JSON.stringify({ data: postBody }),
}).catch((error) => {
console.error("Error fetching jobs:", error);
});

30
src/pages/api/ai/check.ts Normal file
View File

@@ -0,0 +1,30 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: checks with the backend whether the AI feature is enabled.
 * Relays the backend's JSON result to the client.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  try {
    // Build a minimal header set instead of copying req.headers wholesale;
    // forwarding client/hop-by-hop headers (host, content-length, connection)
    // can corrupt the proxied request.
    const headers = new Headers();
    headers.set("content-type", "application/json");
    // req.headers.authorization already carries the "Bearer " prefix the
    // client sent; the original wrapped it again, producing
    // "Bearer Bearer <token>".
    if (req.headers.authorization) {
      headers.set("Authorization", req.headers.authorization);
    }
    const response = await fetch(
      `${global.process.env.NEXT_PUBLIC_API_URL}/api/ai/check`,
      {
        method: "GET",
        headers,
      }
    );
    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }
    const result = await response.json();
    res.status(200).json(result);
  } catch (error) {
    console.error("Error checking AI availability:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

56
src/pages/api/ai/index.ts Normal file
View File

@@ -0,0 +1,56 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: forwards AI chat payloads to the backend `/api/ai` endpooint's
 * event stream and pumps the response back to the browser chunk by chunk.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // Client wraps its payload as { data: ... }; unwrap before forwarding.
  const { data } = req.body;
  try {
    const response = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/ai`, {
      method: "POST",
      headers: {
        Accept: "text/event-stream",
        "Content-Type": "application/json",
      },
      // NOTE(review): no Authorization header is forwarded here, unlike the
      // sibling proxy routes — confirm /api/ai is intentionally
      // unauthenticated on the backend.
      body: JSON.stringify(data),
    });
    if (!response.ok) {
      const errorDetails = await response.text();
      // 422 means the backend rejected the payload shape; log the body
      // separately to aid debugging.
      if (response.status === 422) {
        console.error(`422 Error: ${errorDetails}`);
      }
      throw new Error(
        `Error fetching logs: ${response.statusText} - ${errorDetails}`
      );
    }
    if (!response.body) {
      throw new Error(`No response body from API`);
    }
    // Stream headers: disable caching/transforms so each chunk reaches the
    // client as soon as it is written.
    res.writeHead(200, {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache, no-transform",
      Connection: "keep-alive",
      "Transfer-Encoding": "chunked",
    });
    let responseStream = response.body;
    const reader = responseStream.getReader();
    const decoder = new TextDecoder();
    // Pump the upstream body to the client verbatim until it closes.
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      const chunk = decoder.decode(value, { stream: true });
      res.write(`${chunk}`);
    }
    res.end();
  } catch (error) {
    console.error("Error streaming logs:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

38
src/pages/api/delete.ts Normal file
View File

@@ -0,0 +1,38 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: forwards job deletions to the backend
 * `/api/delete-scrape-jobs` endpoint and relays the JSON result.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method === "POST") {
    const { data } = req.body;
    const headers = new Headers();
    headers.set("content-type", "application/json");
    // req.headers.authorization already carries the "Bearer " prefix the
    // client sent; the original wrapped it again, producing
    // "Bearer Bearer <token>".
    if (req.headers.authorization) {
      headers.set("Authorization", req.headers.authorization);
    }
    try {
      const response = await fetch(
        `${global.process.env.NEXT_PUBLIC_API_URL}/api/delete-scrape-jobs`,
        {
          method: "POST",
          headers,
          body: JSON.stringify(data),
        }
      );
      if (!response.ok) {
        throw new Error(`Error: ${response.statusText}`);
      }
      const result = await response.json();
      res.status(200).json(result);
    } catch (error) {
      console.error("Error deleting scrape jobs:", error);
      res.status(500).json({ error: "Internal Server Error" });
    }
  } else {
    res.setHeader("Allow", ["POST"]);
    res.status(405).end(`Method ${req.method} Not Allowed`);
  }
}

37
src/pages/api/download.ts Normal file
View File

@@ -0,0 +1,37 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: forwards download requests to the backend `/api/download`
 * endpoint and relays the CSV payload as-is.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // Only POST is supported; reject anything else up front.
  if (req.method !== "POST") {
    res.setHeader("Allow", ["POST"]);
    res.status(405).end(`Method ${req.method} Not Allowed`);
    return;
  }
  const { data } = req.body;
  const forwardedHeaders = new Headers();
  forwardedHeaders.set("content-type", "application/json");
  try {
    const upstreamUrl = `${global.process.env.NEXT_PUBLIC_API_URL}/api/download`;
    const upstream = await fetch(upstreamUrl, {
      method: "POST",
      headers: forwardedHeaders,
      body: JSON.stringify(data),
    });
    if (!upstream.ok) {
      throw new Error(`Error: ${upstream.statusText}`);
    }
    // The backend responds with raw CSV text, not JSON.
    const csvText = await upstream.text();
    res.status(200).send(csvText);
  } catch (error) {
    console.error("Error submitting scrape job:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

View File

@@ -0,0 +1,30 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: fetches the average-elements-per-link statistic from the
 * backend and relays the response body to the client.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const headers = new Headers();
  headers.set("content-type", "application/json");
  // req.headers.authorization already carries the "Bearer " prefix the
  // client sent; the original wrapped it again, producing
  // "Bearer Bearer <token>".
  if (req.headers.authorization) {
    headers.set("Authorization", req.headers.authorization);
  }
  try {
    const response = await fetch(
      `${process.env.NEXT_PUBLIC_API_URL}/api/statistics/get-average-element-per-link`,
      {
        method: "GET",
        headers,
      }
    );
    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }
    // Relay the body verbatim (text, not parsed JSON), matching the backend.
    const payload = await response.text();
    res.status(200).send(payload);
  } catch (error) {
    console.error("Error fetching element statistics:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

View File

@@ -0,0 +1,30 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: fetches the average-jobs-per-day statistic from the backend
 * and relays the response body to the client.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  const headers = new Headers();
  headers.set("content-type", "application/json");
  // req.headers.authorization already carries the "Bearer " prefix the
  // client sent; the original wrapped it again, producing
  // "Bearer Bearer <token>".
  if (req.headers.authorization) {
    headers.set("Authorization", req.headers.authorization);
  }
  try {
    const response = await fetch(
      `${global.process.env.NEXT_PUBLIC_API_URL}/api/statistics/get-average-jobs-per-day`,
      {
        method: "GET",
        headers,
      }
    );
    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }
    // Relay the body verbatim (text, not parsed JSON), matching the backend.
    const payload = await response.text();
    res.status(200).send(payload);
  } catch (error) {
    console.error("Error fetching job statistics:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

31
src/pages/api/job/[id].ts Normal file
View File

@@ -0,0 +1,31 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: looks up a single job by id on the backend `/api/job/:id`
 * endpoint and relays the JSON result.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // Dynamic route segment: /api/job/[id].
  const { id } = req.query;
  const headers = new Headers();
  headers.set("content-type", "application/json");
  // req.headers.authorization already carries the "Bearer " prefix the
  // client sent; the original wrapped it again, producing
  // "Bearer Bearer <token>".
  if (req.headers.authorization) {
    headers.set("Authorization", req.headers.authorization);
  }
  try {
    const response = await fetch(
      `${global.process.env.NEXT_PUBLIC_API_URL}/api/job/${id}`,
      {
        headers,
      }
    );
    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }
    const result = await response.json();
    res.status(200).json(result);
  } catch (error) {
    console.error("Error fetching job:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

45
src/pages/api/logs.ts Normal file
View File

@@ -0,0 +1,45 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: streams the backend log feed to the browser as
 * server-sent events.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  try {
    const response = await fetch(
      `${process.env.NEXT_PUBLIC_API_URL}/api/logs`,
      {
        method: "GET",
        headers: {
          Accept: "text/event-stream",
        },
      }
    );
    if (!response.ok || !response.body) {
      throw new Error(`Error fetching logs: ${response.statusText}`);
    }
    // SSE response headers: disable caching/transforms so each chunk is
    // flushed to the client immediately.
    res.writeHead(200, {
      "Content-Type": "text/event-stream",
      "Cache-Control": "no-cache, no-transform",
      Connection: "keep-alive",
      "Transfer-Encoding": "chunked",
    });
    let responseStream = response.body;
    const reader = responseStream.getReader();
    const decoder = new TextDecoder();
    // Pump the upstream body until it closes, re-framing each chunk as an
    // SSE "data:" event.
    // NOTE(review): if the upstream already emits SSE-framed lines
    // ("data: ...\n\n"), this double-wraps them — confirm the backend
    // /api/logs output format.
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      const chunk = decoder.decode(value, { stream: true });
      res.write(`data: ${chunk}\n\n`);
    }
    res.end();
  } catch (error) {
    console.error("Error streaming logs:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

29
src/pages/api/me.ts Normal file
View File

@@ -0,0 +1,29 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: fetches the authenticated user's profile from the backend
 * `/api/auth/users/me` endpoint and relays the JSON result.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  try {
    // Build a minimal header set instead of copying req.headers wholesale;
    // forwarding client/hop-by-hop headers (host, content-length,
    // connection) into the proxied fetch can corrupt the upstream request.
    const headers = new Headers();
    headers.set("content-type", "application/json");
    // The client sends "Authorization: Bearer <token>"; forward it as-is.
    if (req.headers.authorization) {
      headers.set("Authorization", req.headers.authorization);
    }
    const response = await fetch(
      `${global.process.env.NEXT_PUBLIC_API_URL}/api/auth/users/me`,
      {
        method: "GET",
        headers,
      }
    );
    if (!response.ok) {
      throw new Error(`Error: ${response.statusText}`);
    }
    const result = await response.json();
    res.status(200).json(result);
  } catch (error) {
    console.error("Error fetching current user:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

38
src/pages/api/retrieve.ts Normal file
View File

@@ -0,0 +1,38 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: forwards job-list queries to the backend
 * `/api/retrieve-scrape-jobs` endpoint and relays the JSON result.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method === "POST") {
    const { data } = req.body;
    const headers = new Headers();
    headers.set("content-type", "application/json");
    // req.headers.authorization already carries the "Bearer " prefix the
    // client sent; the original wrapped it again, producing
    // "Bearer Bearer <token>".
    if (req.headers.authorization) {
      headers.set("Authorization", req.headers.authorization);
    }
    try {
      const response = await fetch(
        `${global.process.env.NEXT_PUBLIC_API_URL}/api/retrieve-scrape-jobs`,
        {
          method: "POST",
          headers,
          body: JSON.stringify(data),
        }
      );
      if (!response.ok) {
        throw new Error(`Error: ${response.statusText}`);
      }
      const result = await response.json();
      res.status(200).json(result);
    } catch (error) {
      console.error("Error retrieving scrape jobs:", error);
      res.status(500).json({ error: "Internal Server Error" });
    }
  } else {
    res.setHeader("Allow", ["POST"]);
    res.status(405).end(`Method ${req.method} Not Allowed`);
  }
}

37
src/pages/api/signup.ts Normal file
View File

@@ -0,0 +1,37 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: forwards signup payloads to the backend `/api/auth/signup`
 * endpoint and relays the JSON result.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  // Only POST is supported; reject anything else up front.
  if (req.method !== "POST") {
    res.setHeader("Allow", ["POST"]);
    res.status(405).end(`Method ${req.method} Not Allowed`);
    return;
  }
  const { data } = req.body;
  const forwardedHeaders = new Headers();
  forwardedHeaders.set("content-type", "application/json");
  try {
    const upstream = await fetch(
      `${global.process.env.NEXT_PUBLIC_API_URL}/api/auth/signup`,
      {
        method: "POST",
        headers: forwardedHeaders,
        body: JSON.stringify(data),
      }
    );
    if (!upstream.ok) {
      throw new Error(`Error: ${upstream.statusText}`);
    }
    const payload = await upstream.json();
    res.status(200).json(payload);
  } catch (error) {
    console.error("Error submitting scrape job:", error);
    res.status(500).json({ error: "Internal Server Error" });
  }
}

View File

@@ -0,0 +1,37 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: forwards new scrape-job submissions to the backend
 * `/api/submit-scrape-job` endpoint and relays the JSON result.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method === "POST") {
    const { data } = req.body;
    // Build a minimal header set instead of copying req.headers wholesale;
    // forwarding client/hop-by-hop headers (host, content-length,
    // connection) into the proxied fetch can corrupt the upstream request.
    const headers = new Headers();
    headers.set("content-type", "application/json");
    if (req.headers.authorization) {
      headers.set("Authorization", req.headers.authorization);
    }
    try {
      const response = await fetch(
        `${global.process.env.NEXT_PUBLIC_API_URL}/api/submit-scrape-job`,
        {
          method: "POST",
          headers,
          body: JSON.stringify(data),
        }
      );
      if (!response.ok) {
        throw new Error(`Error: ${response.statusText}`);
      }
      const result = await response.json();
      res.status(200).json(result);
    } catch (error) {
      console.error("Error submitting scrape job:", error);
      res.status(500).json({ error: "Internal Server Error" });
    }
  } else {
    res.setHeader("Allow", ["POST"]);
    res.status(405).end(`Method ${req.method} Not Allowed`);
  }
}

39
src/pages/api/token.ts Normal file
View File

@@ -0,0 +1,39 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: exchanges username/password credentials for a backend auth
 * token via `/api/auth/token` and relays the JSON result.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method === "POST") {
    // Next's body parser decodes x-www-form-urlencoded bodies into an
    // object, so req.body is typically an object rather than a raw string.
    // URLSearchParams accepts both shapes, so drop the misleading
    // `as string` cast.
    const body = new URLSearchParams(req.body);
    const username = body.get("username") || "";
    const password = body.get("password") || "";
    const headers = new Headers();
    headers.set("content-type", "application/x-www-form-urlencoded");
    try {
      const response = await fetch(
        `${global.process.env.NEXT_PUBLIC_API_URL}/api/auth/token`,
        {
          method: "POST",
          headers,
          body: new URLSearchParams({ username, password }).toString(),
        }
      );
      if (!response.ok) {
        throw new Error(`Error: ${response.statusText}`);
      }
      const result = await response.json();
      res.status(200).json(result);
    } catch (error) {
      console.error("Error requesting auth token:", error);
      res.status(500).json({ error: "Internal Server Error" });
    }
  } else {
    res.setHeader("Allow", ["POST"]);
    res.status(405).end(`Method ${req.method} Not Allowed`);
  }
}

48
src/pages/api/update.ts Normal file
View File

@@ -0,0 +1,48 @@
import { NextApiRequest, NextApiResponse } from "next";
/**
 * Proxy route: forwards job field updates to the backend `/api/update`
 * endpoint and relays the JSON result.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  if (req.method === "POST") {
    const { data } = req.body;
    const headers = new Headers();
    headers.set("content-type", "application/json");
    // req.headers.authorization already carries the "Bearer " prefix the
    // client sent; the original wrapped it again, producing
    // "Bearer Bearer <token>".
    if (req.headers.authorization) {
      headers.set("Authorization", req.headers.authorization);
    }
    try {
      const response = await fetch(
        `${global.process.env.NEXT_PUBLIC_API_URL}/api/update`,
        {
          method: "POST",
          headers,
          body: JSON.stringify(data),
        }
      );
      if (!response.ok) {
        const errorDetails = await response.text();
        // 422 means the backend rejected the payload shape; log the body
        // separately to aid debugging. (The original had a second,
        // unreachable `if (!response.ok)` check after this throw.)
        if (response.status === 422) {
          console.error(`422 Error: ${errorDetails}`);
        }
        throw new Error(
          `Error updating job: ${response.statusText} - ${errorDetails}`
        );
      }
      const result = await response.json();
      res.status(200).json(result);
    } catch (error) {
      console.error("Error updating job:", error);
      res.status(500).json({ error: "Internal Server Error" });
    }
  } else {
    res.setHeader("Allow", ["POST"]);
    res.status(405).end(`Method ${req.method} Not Allowed`);
  }
}

View File

@@ -81,12 +81,14 @@ const AI: React.FC = () => {
}. The following messages will pertain to the content of the scraped job.`,
};
const response = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/ai`, {
const response = await fetch("/api/ai", {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({ messages: [jobMessage, ...messages, newMessage] }),
body: JSON.stringify({
data: { messages: [jobMessage, ...messages, newMessage] },
}),
});
const updatedMessages = [...messages, newMessage];

View File

@@ -21,26 +21,21 @@ export const getServerSideProps: GetServerSideProps = async (context) => {
if (token) {
try {
const userResponse = await axios.get(
`${process.env.SERVER_URL}/api/auth/users/me`,
{
headers: { Authorization: `Bearer ${token}` },
}
);
const userResponse = await axios.get(`/api/me`, {
headers: { Authorization: `Bearer ${token}` },
});
user = userResponse.data;
const jobsResponse = await axios.post(
`${process.env.SERVER_URL}/api/retrieve-scrape-jobs`,
{ user: user.email },
{
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
}
);
const jobsResponse = await fetch(`/api/retrieve-scrape-jobs`, {
method: "POST",
body: JSON.stringify({ user: user.email }),
headers: {
"content-type": "application/json",
Authorization: `Bearer ${token}`,
},
});
initialJobs = jobsResponse.data;
initialJobs = await jobsResponse.json();
} catch (error) {
console.error("Error fetching user or jobs:", error);
}

View File

@@ -27,10 +27,12 @@ const AuthForm: React.FC = () => {
alert("Login successful");
router.push("/");
} else {
await axios.post(`${Constants.DOMAIN}/api/auth/signup`, {
email: email,
password: password,
full_name: fullName,
await axios.post(`/api/signup`, {
data: {
email: email,
password: password,
full_name: fullName,
},
});
alert("Signup successful");
router.push("/login");

View File

@@ -75,15 +75,12 @@ const Statistics: React.FC<StatProps> = ({ averageElement, averageJob }) => {
const fetchElementsData = async () => {
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/statistics/get-average-element-per-link`,
{
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
}
);
const response = await fetch("/api/get-average-element-per-link", {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},
});
const data = await response.json();
setElementsData(data);
} catch (error) {
@@ -93,10 +90,8 @@ const Statistics: React.FC<StatProps> = ({ averageElement, averageJob }) => {
const fetchJobsData = async () => {
try {
const response = await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/statistics/get-average-jobs-per-day`,
{
headers: {
const response = await fetch("/api/get-average-jobs-per-day", {
headers: {
"Content-Type": "application/json",
Authorization: `Bearer ${token}`,
},

View File

@@ -1,5 +1,3 @@
import { Constants } from "@/lib";
export const submitJob = async (
submittedURL: string,
rows: any[],
@@ -7,22 +5,21 @@ export const submitJob = async (
jobOptions: any,
customHeaders: any
) => {
return await fetch(
`${process.env.NEXT_PUBLIC_API_URL}/api/submit-scrape-job`,
{
method: "POST",
headers: { "content-type": "application/json" },
body: JSON.stringify({
return await fetch(`/api/submit-scrape-job`, {
method: "POST",
headers: { "content-type": "application/json" },
body: JSON.stringify({
data: {
url: submittedURL,
elements: rows,
user: user?.email,
time_created: new Date().toISOString(),
job_options: {
...jobOptions,
custom_headers: customHeaders,
custom_headers: customHeaders || {},
proxies: jobOptions.proxies ? jobOptions.proxies.split(",") : [],
},
}),
}
);
},
}),
});
};