diff --git a/Dockerfile b/Dockerfile
index 915866c..1ad44cd 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -9,6 +9,8 @@ RUN apt-get update && \
     curl -sL https://deb.nodesource.com/setup_14.x | bash - && \
     apt-get install -y nodejs chromium-browser
 
+RUN apt-get install -y npm
+
 # Set the working directory
 WORKDIR /app
 
@@ -22,4 +24,4 @@ RUN npm install
 COPY . .
 
 # Start the application
-CMD ["npm", "run", "start:prod"]
+CMD ["npm", "run", "start:dev"]
diff --git a/README.md b/README.md
index 66517a6..b5bb969 100644
--- a/README.md
+++ b/README.md
@@ -17,4 +17,20 @@ Post a JSON object to the crawler with the following format:
 }
 
 The crawler will then crawl the given url and store the results in a database and assets in a file system
-`crawler_assests/www.example.com/`.
\ No newline at end of file
+`crawler_assests/www.example.com/`.
+
+
+# API #
+
+The API serves the results of the crawler.
+
+# Routes #
+## GET ##
+- `/sites` - Returns a list of all sites
+- `/sites/:id` - Returns the site object for the given site ID
+- `/sites/domain/:domain` - Returns the domain object for the given domain
+## DELETE ##
+- `/sites/:id` - Deletes the site object for the given site ID
+- `/sites/domain/:domain` - Deletes the domain object for the given domain
+## POST ##
+- `/sites/:id` - Updates the site object for the given site ID
\ No newline at end of file
diff --git a/docker-compose.yaml b/docker-compose.yaml
index b1247b8..17ba5e0 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -5,17 +5,17 @@ services:
     ports:
       - '3000:3000'
     depends_on:
-      - db
+      - mongo
     environment:
-      - MONGO_URL=mongodb://db:27017/nestjs
+      - MONGO_URL=mongodb://mongo:27017/${DB_NAME}
     networks:
       - appnet
-  db:
+  mongo:
     image: mongo
     environment:
-      - MONGO_INITDB_DATABASE=nestjs
-      - MONGO_INITDB_ROOT_USERNAME=admin
-      - MONGO_INITDB_ROOT_PASSWORD=adminpassword
+      - MONGO_INITDB_DATABASE=${DB_NAME}
+      - MONGO_INITDB_ROOT_USERNAME=${DB_USER}
+      - MONGO_INITDB_ROOT_PASSWORD=${DB_PASSWORD}
     volumes:
       - dbdata:/data/db
     networks:
diff --git a/src/api/api.controller.ts b/src/api/api.controller.ts
index c885323..c9880a1 100644
--- a/src/api/api.controller.ts
+++ b/src/api/api.controller.ts
@@ -6,7 +6,9 @@ import { Site } from 'src/interfaces/site.interface';
 
 
 @Controller('/')
 export class ApiController {
+  constructor(private crawlerService: CrawlerService, @InjectModel('Site') private readonly siteModel: Model<Site>) {}
+
 
   @Post('crawl')
   async crawl(@Body() body: { url: string }) {
@@ -62,7 +64,5 @@ export class ApiController {
 
     const site = await this.siteModel.findByIdAndDelete(id).exec();
     return site || {};
   }
-
-
 }
diff --git a/src/crawler/crawler.service.ts b/src/crawler/crawler.service.ts
index 6eb2c44..05284d6 100644
--- a/src/crawler/crawler.service.ts
+++ b/src/crawler/crawler.service.ts
@@ -44,7 +44,6 @@ export class CrawlerService {
 
     // SCREENSHOT //
     const screenshotBuffer: Buffer = await page.screenshot({ fullPage: true });
-
     await new Promise((resolve, reject) => {
       fs.writeFile(`${directory}screenshot.png`, screenshotBuffer, (err) => {
         if (err) {
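The compose changes above move the database name and credentials into `DB_NAME`, `DB_USER`, and `DB_PASSWORD`. Docker Compose reads these from a `.env` file next to `docker-compose.yaml`; a minimal sketch, reusing the values the diff removed as placeholders:

```env
# .env (placeholder values taken from the old hardcoded config; choose real credentials)
DB_NAME=nestjs
DB_USER=admin
DB_PASSWORD=adminpassword
```

One caveat: with `MONGO_INITDB_ROOT_USERNAME` and `MONGO_INITDB_ROOT_PASSWORD` set, the official `mongo` image starts with authentication enabled, so the app's `MONGO_URL` will likely need credentials as well, e.g. `mongodb://${DB_USER}:${DB_PASSWORD}@mongo:27017/${DB_NAME}?authSource=admin`.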
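The new `ApiController` constructor references `CrawlerService`, `InjectModel`, and `Model<Site>`, but the hunk does not show the matching import lines. A sketch of what the top of the file would need, assuming the standard `@nestjs/mongoose` setup (the `CrawlerService` import path is an assumption):

```typescript
// Presumed imports for the injected dependencies (paths are assumptions);
// the Site import is already visible in the hunk context above.
import { InjectModel } from '@nestjs/mongoose';
import { Model } from 'mongoose';
import { CrawlerService } from 'src/crawler/crawler.service';
```

For `@InjectModel('Site')` to resolve, the module must register the model under the same token, typically via `MongooseModule.forFeature([{ name: 'Site', schema: SiteSchema }])`.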
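For completeness, a minimal client sketch for the routes documented in the README, assuming the compose stack is up on `localhost:3000` (per the `3000:3000` port mapping) and a Node 18+ runtime with global `fetch`; the `_id` field name is the usual Mongo default and an assumption here:

```typescript
// Minimal client sketch; BASE and the _id field are assumptions.
const BASE = 'http://localhost:3000';

async function main() {
  // Queue a crawl (POST /crawl with a JSON body, per the README).
  await fetch(`${BASE}/crawl`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: 'https://www.example.com' }),
  });

  // List all crawled sites (GET /sites).
  const sites = await (await fetch(`${BASE}/sites`)).json();
  console.log(sites);

  // Delete the first site by id (DELETE /sites/:id).
  if (Array.isArray(sites) && sites.length > 0) {
    await fetch(`${BASE}/sites/${sites[0]._id}`, { method: 'DELETE' });
  }
}

main().catch(console.error);
```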