diff --git a/src/crawler/crawler.service.ts b/src/crawler/crawler.service.ts
index 95ad45a..210a579 100644
--- a/src/crawler/crawler.service.ts
+++ b/src/crawler/crawler.service.ts
@@ -25,31 +25,13 @@ export class CrawlerService {
     // STYLESHEETS //
     const stylesheetsUrls = await page.$$eval('link[rel="stylesheet"]', links => links.map(link => link.href));
     let cssDir = `${directory}/css/`
-    if (!fs.existsSync(cssDir)) {
-      mkdirSync(cssDir);
-    }
-    stylesheetsUrls.forEach(async (stylesheetUrl) => {
-      if(!stylesheetUrl.startsWith('http')) return;
-      const response = await axios.get(stylesheetUrl);
-      const content = response.data;
-      fs.writeFileSync(`${cssDir}${stylesheetUrl.split('/').pop()}`, content);
-    });
+    await this.downloadFile(stylesheetsUrls, cssDir);
     // STYLESHEETS //
 
     // SCRIPTS //
     const scriptsUrls = await page.$$eval('script', scripts => scripts.map(script => script.src));
     let scriptsDir = `${directory}/scripts/`
-    if (!fs.existsSync(scriptsDir)) {
-      mkdirSync(scriptsDir);
-    }
-    console.log(scriptsUrls)
-
-    scriptsUrls.forEach(async (scriptUrl) => {
-      if (!scriptUrl.startsWith('http')) return;
-      const response = await axios.get(scriptUrl);
-      const content = response.data;
-      fs.writeFileSync(`${scriptsDir}${scriptUrl.split('/').pop()}`, content);
-    });
+    await this.downloadFile(scriptsUrls, scriptsDir);
     // SCRIPTS //
 
     // SCREENSHOT //
@@ -70,6 +52,23 @@ export class CrawlerService {
     await browser.close();
   }
 
+  // Download every http(s) URL in `urls` into directory `dir` (created if
+  // missing), naming each file after the URL's last path segment. All
+  // downloads are awaited so callers observe completion and failures.
+  async downloadFile(urls: string[], dir: string): Promise<void> {
+    if (!fs.existsSync(dir)) {
+      mkdirSync(dir);
+    }
+    const downloads = urls
+      .filter((url) => url.startsWith('http'))
+      .map(async (url) => {
+        const response = await axios.get(url);
+        fs.writeFileSync(`${dir}${url.split('/').pop()}`, response.data);
+      });
+    await Promise.all(downloads);
+  }
+
   extractDomain(urlString: string) {
     const url = new URL(urlString);
     return url.hostname;