mirror of
https://github.com/jaypyles/Scraperr.git
synced 2025-12-15 04:05:50 +00:00
chore: remove logging
This commit is contained in:
@@ -26,8 +26,6 @@ class HtmlElement(_Element): ...
|
||||
def is_same_domain(url: str, original_url: str) -> bool:
    """Return True if ``url`` belongs to the same domain as ``original_url``.

    Args:
        url: Candidate link (absolute or relative).
        original_url: The page URL the link was found on.

    Returns:
        True when both URLs share the same network location, or when
        ``url`` has no netloc at all (a relative path such as ``/about``
        resolves against the original host and is therefore same-domain).
    """
    parsed_url = urlparse(url)
    parsed_original_url = urlparse(original_url)
    # Debug LOG.info calls removed (chore: remove logging) — the check
    # depends only on the parsed network locations.
    return parsed_url.netloc == parsed_original_url.netloc or parsed_url.netloc == ""
|
||||
|
||||
|
||||
@@ -88,8 +86,6 @@ async def make_site_request(
|
||||
if url in visited_urls:
|
||||
return
|
||||
|
||||
LOG.info(f"Visited URLs: {visited_urls}")
|
||||
|
||||
driver = create_driver()
|
||||
driver.implicitly_wait(10)
|
||||
|
||||
@@ -100,7 +96,6 @@ async def make_site_request(
|
||||
LOG.info(f"Visiting URL: {url}")
|
||||
driver.get(url)
|
||||
final_url = driver.current_url
|
||||
LOG.info(f"Final URL: {final_url}")
|
||||
visited_urls.add(url)
|
||||
visited_urls.add(final_url)
|
||||
_ = WebDriverWait(driver, 10).until(
|
||||
@@ -120,7 +115,6 @@ async def make_site_request(
|
||||
|
||||
for a_tag in soup.find_all("a"):
|
||||
link = a_tag.get("href")
|
||||
LOG.info(f"Found Link: {link}")
|
||||
|
||||
if link:
|
||||
if not urlparse(link).netloc:
|
||||
|
||||
@@ -83,7 +83,6 @@ const JobTable: React.FC<JobTableProps> = ({ jobs, fetchJobs }) => {
|
||||
};
|
||||
|
||||
const handleNavigate = (elements: Object[], url: string, options: any) => {
|
||||
console.log(options);
|
||||
router.push({
|
||||
pathname: "/",
|
||||
query: {
|
||||
|
||||
@@ -111,7 +111,6 @@ export const JobSubmitter = ({ stateProps }: Props) => {
|
||||
return response.json();
|
||||
})
|
||||
.then((data) => {
|
||||
console.log(data);
|
||||
setSnackbarMessage(data);
|
||||
setSnackbarSeverity("info");
|
||||
setSnackbarOpen(true);
|
||||
|
||||
Reference in New Issue
Block a user