Compare commits

...

11 commits

16 changed files with 2219 additions and 776 deletions

.dockerignore Normal file

@@ -0,0 +1 @@
+node_modules

.gitignore vendored

@@ -4,7 +4,7 @@
 # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
 #poppeteer's output
-crawler_assets/**/*
+sites_assets/**/*
 # User-specific stuff:

Dockerfile

@@ -1,13 +1,14 @@
-FROM ubuntu:20.04 AS base
+FROM node:14-alpine AS base
 # Set non-interactive mode
-ENV DEBIAN_FRONTEND noninteractive
+ENV DEBIAN_FRONTEND=noninteractive
 ENV NODE_ENV=production
-# Install required packages
-RUN apt-get update && \
-    curl -sL https://deb.nodesource.com/setup_14.x | bash - && \
-    apt-get install -y nodejs chromium-browser
+# Install chromium
+RUN apk add --no-cache chromium chromium-chromedriver
+# Create a symbolic link for google-chrome
+RUN ln -s /usr/bin/chromium-browser /usr/bin/google-chrome
 # Set the working directory
 WORKDIR /app
@@ -16,10 +17,11 @@ WORKDIR /app
 COPY package*.json ./
 # Install dependencies
-RUN npm install
+RUN npm install -g npm@9.6.3 && \
+    npm install
 # Copy the source code
 COPY . .
 # Start the application
-CMD ["npm", "run", "start:prod"]
+CMD ["npm", "run", "start"]

README.md

@@ -6,9 +6,7 @@ The crawler is a simple crawler that crawls the web and stores the results in a
 ## Crawler ##
 ### Usage ###
 Post a JSON object to the crawler with the following format:
 `domain.com/crawl`
@@ -17,4 +15,20 @@ Post a JSON object to the crawler with the following format:
 }
 The crawler will then crawl the given url and store the results in a database and assets in a file system
 `crawler_assests/www.example.com/`.
+# API #
+The API is a simple API that serves the results of the crawler.
+# Routes #
+## GET ##
+/sites - Returns a list of all sites
+/sites/:id - Returns the site object for the given site Id
+sites/domain/:domain - Returns the domain object for the given domain
+## DELETE ##
+/sites/:id - Deletes the site object for the given site Id
+sites/domain/:domain - Deletes the domain object for the given domain
+## Post ##
+sites/:id - Updates the site object for the given site Id
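
For context, a minimal sketch of exercising the routes described above once the stack is running. It assumes the API listens on http://localhost:3000 (the '3000:3000' port mapping in docker-compose.yml) and a Node 18+ runtime with global fetch; the { url: string } body shape comes from api.controller.ts, while the file name and demo function are hypothetical:

// demo.ts - hypothetical client script, not part of this changeset
async function demo(): Promise<void> {
  const base = 'http://localhost:3000'; // assumed from the docker-compose port mapping
  // Kick off a crawl; the controller expects a JSON body of shape { url: string }.
  const res = await fetch(`${base}/crawl`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: 'https://www.example.com' }),
  });
  console.log(await res.json()); // { message: 'Got your request for https://www.example.com' }
  // Once the crawl has finished, the stored results are queryable.
  const sites = await fetch(`${base}/sites`).then((r) => r.json());
  const site = await fetch(`${base}/sites/domain/www.example.com`).then((r) => r.json());
  console.log(sites.length, site.domain);
}
demo().catch(console.error);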

docker-compose.yml

@@ -5,17 +5,17 @@ services:
     ports:
       - '3000:3000'
     depends_on:
-      - db
+      - mongo
     environment:
-      - MONGO_URL=mongodb://db:27017/nestjs
+      - MONGO_URL=mongodb://mongo:27017/mydatabase
     networks:
       - appnet
-  db:
+  mongo:
     image: mongo
     environment:
-      - MONGO_INITDB_DATABASE=nestjs
-      - MONGO_INITDB_ROOT_USERNAME=admin
-      - MONGO_INITDB_ROOT_PASSWORD=adminpassword
+      - MONGO_INITDB_DATABASE=mydatabase
+      - MONGO_INITDB_ROOT_USERNAME=akamai
+      - MONGO_INITDB_ROOT_PASSWORD=password
     volumes:
       - dbdata:/data/db
     networks:

package-lock.json generated

File diff suppressed because it is too large

package.json

@@ -25,13 +25,14 @@
     "@nestjs/platform-express": "^9.0.0",
     "axios": "^1.3.5",
     "dotenv": "^16.0.3",
+    "minimatch": "^9.0.0",
     "mongoose": "^7.0.4",
     "puppeteer": "^19.9.1",
     "reflect-metadata": "^0.1.13",
+    "@nestjs/cli": "^9.0.0",
     "rxjs": "^7.5.5"
   },
   "devDependencies": {
-    "@nestjs/cli": "^9.0.0",
     "@nestjs/schematics": "^9.0.0",
     "@nestjs/testing": "^9.0.0",
     "@types/express": "^4.17.13",

src/api/api.controller.ts

@@ -1,21 +1,68 @@
-import { Body, Controller, Post } from '@nestjs/common';
+import { Body, Controller, Delete, Get, Param, Post } from '@nestjs/common';
 import { CrawlerService } from '../crawler/crawler.service';
-import { DbService } from '../db/db.service';
+import { InjectModel } from '@nestjs/mongoose';
+import { Model } from 'mongoose';
+import { Site } from 'src/interfaces/site.interface';
 @Controller('/')
 export class ApiController {
-  constructor(private crawlerService: CrawlerService, private DbService: DbService) {}
+  constructor(private crawlerService: CrawlerService, @InjectModel('Site') private readonly siteModel: Model<Site>) {}
   @Post('crawl')
   async crawl(@Body() body: { url: string }) {
     const results = this.crawlerService.crawl(body.url);
     results.then((data) => {
-      console.log(data)
-      this.DbService.insert(data, 'crawler');
+      console.log("Done crawling !", data);
+      const newSite = new this.siteModel(data);
+      newSite.save().then((result) => {
+        console.log("Site saved !", result);
+      }).catch((err) => {
+        console.log("Error saving site !", err.message);
+      });
     }).catch((err) => {
+      console.log("** Error crawling ! **", err);
       console.log(err);
     });
     return {
-      message: 'Crawling in progress'
+      message: 'Got your request for ' + body.url
     }
   }
+  // Get all
+  @Get('sites')
+  async getSites() {
+    const sites = await this.siteModel.find().exec();
+    return sites || {};
+  }
+  // Get by id
+  @Get('sites/:id')
+  async getSite(@Param('id') id: string) {
+    const site = await this.siteModel.findById(id).exec();
+    return site || {};
+  }
+  // Get by domain
+  @Get('sites/domain/:domain')
+  async getSiteByDomain(@Param('domain') domain: string) {
+    const site = await this.siteModel.findOne({ domain }).exec();
+    return site || {};
+  }
+  // Delete by domain
+  @Delete('sites/domain/:domain')
+  async deleteSiteByDomain(@Param('domain') domain: string) {
+    const site = await this.siteModel.findOneAndDelete({ domain }).exec();
+    return site || {};
+  }
+  // Delete by id
+  @Delete('sites/:id')
+  async deleteSite(@Param('id') id: string) {
+    const site = await this.siteModel.findByIdAndDelete(id).exec();
+    return site || {};
+  }
 }

src/api/api.module.ts

@@ -1,11 +1,13 @@
 import { Module } from '@nestjs/common';
 import { ApiController } from './api.controller';
 import { CrawlerService } from '../crawler/crawler.service';
-import { DbService } from '../db/db.service';
+import { SitesSchema } from '../schema/sites.schema';
+import { MongooseModule } from '@nestjs/mongoose';
 @Module({
+  imports: [MongooseModule.forFeature([{ name: 'Site', schema: SitesSchema }])],
   controllers: [ApiController],
-  providers: [CrawlerService, DbService]
+  providers: [CrawlerService]
 })
 export class ApiModule {}

src/app.module.ts

@@ -1,9 +1,20 @@
 import { Module } from '@nestjs/common';
+import { MongooseModule } from '@nestjs/mongoose';
 import { ApiModule } from './api/api.module';
 import { CrawlerModule } from './crawler/crawler.module';
-import { DbModule } from './db/db.module';
+const dotenv = require('dotenv');
+dotenv.config();
 @Module({
-  imports: [ApiModule, CrawlerModule, DbModule]
+  imports: [
+    MongooseModule.forRoot(process.env.MONGO_URL, {
+      useNewUrlParser: true,
+      useUnifiedTopology: true,
+    }),
+    ApiModule,
+    CrawlerModule
+  ]
 })
-export class AppModule {}
+export class AppModule { }

src/crawler/crawler.module.ts

@@ -1,9 +1,7 @@
 import { Module } from '@nestjs/common';
 import { CrawlerService } from './crawler.service';
-import { DbModule } from '../db/db.module';
 @Module({
-  imports: [DbModule],
   providers: [CrawlerService]
 })
 export class CrawlerModule {}

src/crawler/crawler.service.ts

@@ -4,21 +4,31 @@ import * as fs from 'fs';
 import puppeteer from 'puppeteer';
 import { URL } from 'url';
 import axios from 'axios';
+import { Site } from '../interfaces/site.interface';
+const environment = process.env.NODE_ENV || 'development';
 @Injectable()
 export class CrawlerService {
-  async crawl(url: string): Promise<any> {
-    const browser = await puppeteer.launch({ headless: true, args: ['--no-sandbox'] });
+  constructor() {
+    if (!fs.existsSync('sites_assets')) {
+      mkdirSync('sites_assets');
+    }
+  }
+  async crawl(url: string): Promise<Site> {
+    console.log("start crawl website", url);
+    const browser = await puppeteer.launch({ executablePath: '/usr/bin/chromium-browser', headless: true, args: ['--no-sandbox'] });
     const page = await browser.newPage();
     const domain = this.extractDomain(url);
     await page.goto(url);
-    const directory = `crawler_assets/${domain}/`;
+    const directory = `sites_assets/${domain}/`;
     if (!fs.existsSync(directory)) {
       mkdirSync(directory);
     }
     // STYLESHEETS //
+    console.log("start stylesheets")
     const stylesheetsUrls = await page.$$eval('link[rel="stylesheet"]', links => links.map(link => link.href));
     let cssDir = `${directory}/css/`
     const cssSheetsLocation = await this.downloadFiles(stylesheetsUrls, cssDir);
@@ -34,18 +44,28 @@ export class CrawlerService {
     // SCREENSHOT //
     const screenshotBuffer: Buffer = await page.screenshot({ fullPage: true });
-    fs.writeFile(`${directory}screenshot.png`, screenshotBuffer, (err) => {
-      if (err) throw err;
-      // console.log(`Screenshot saved! ${directory}screenshot.png`);
+    await new Promise((resolve, reject) => {
+      fs.writeFile(`${directory}screenshot.png`, screenshotBuffer, (err) => {
+        if (err) {
+          reject(err);
+        } else {
+          resolve(true);
+        }
+      });
     });
     // SCREENSHOT //
     // URLS //
     const urls = await page.$$eval('a', links => links.map(link => link.href));
+    const urlsList = urls.filter((url) => url.startsWith('http'));
+    console.log(urlsList);
     await browser.close();
     return {
+      domain,
       cssSheetsLocation,
-      scriptsSheetsLocation
+      scriptsSheetsLocation,
+      urlsList
     }
   }
@@ -68,8 +88,7 @@ export class CrawlerService {
       if (fileLocation.length > 10) {
         fileLocation = fileLocation.substring(0, 10);
       }
-      console.log("fileLocation: " + fileLocation)
-      finalUrls.push(fileLocation);
+      finalUrls.push(`${path}${fileLocation}`);
       console.log(`Saving file ${path}${fileLocation}`);
       fs.writeFileSync(`${path}${fileLocation}`, content);
     })

src/db/db.module.ts deleted

@@ -1,7 +0,0 @@
-import { Module } from '@nestjs/common';
-import { DbService } from './db.service';
-@Module({
-  providers: [DbService]
-})
-export class DbModule {}

src/db/db.service.ts deleted

@@ -1,17 +0,0 @@
-import { Injectable } from '@nestjs/common';
-@Injectable()
-export class DbService {
-  constructor() {
-    console.log(`DbService constructor`);
-  }
-  insert(data: {
-    cssSheetsLocation: string[];
-    scriptsSheetsLocation: string[];
-  }, collection: string) {
-    console.log({data, collection});
-  }
-}

src/interfaces/site.interface.ts new file

@@ -0,0 +1,6 @@
+export interface Site {
+  domain: string;
+  cssSheetsLocation: string[];
+  scriptsSheetsLocation: string[];
+  urlsList: string[];
+}

src/schema/sites.schema.ts new file

@@ -0,0 +1,21 @@
+import { Prop, Schema, SchemaFactory } from '@nestjs/mongoose';
+import { Document } from 'mongoose';
+export type SitesDocument = Sites & Document;
+@Schema()
+export class Sites {
+  @Prop({ required: true, unique: true })
+  domain: string;
+  @Prop()
+  cssSheetsLocation: string[];
+  @Prop()
+  scriptsSheetsLocation: string[];
+  @Prop({ required: true })
+  urlsList: string[];
+}
+export const SitesSchema = SchemaFactory.createForClass(Sites);