adding API usage to README

Kfir Dayan 2023-04-19 10:53:12 +03:00
parent 15c718dd0a
commit c57a13d8d8
5 changed files with 28 additions and 11 deletions

@@ -9,6 +9,8 @@ RUN apt-get update && \
 curl -sL https://deb.nodesource.com/setup_14.x | bash - && \
 apt-get install -y nodejs chromium-browser
+RUN apt-get install -y npm
 # Set the working directory
 WORKDIR /app
@@ -22,4 +24,4 @@ RUN npm install
 COPY . .
 # Start the application
-CMD ["npm", "run", "start:prod"]
+CMD ["npm", "run", "start:dev"]

@@ -18,3 +18,19 @@ Post a JSON object to the crawler with the following format:
 The crawler will then crawl the given URL, store the results in a database, and save the assets to the file system under
 `crawler_assests/www.example.com/`.
+# API #
+A simple API serves the results of the crawler.
+# Routes #
+## GET ##
+/sites - Returns a list of all sites
+/sites/:id - Returns the site object for the given site ID
+/sites/domain/:domain - Returns the domain object for the given domain
+## DELETE ##
+/sites/:id - Deletes the site object for the given site ID
+/sites/domain/:domain - Deletes the domain object for the given domain
+## POST ##
+/sites/:id - Updates the site object for the given site ID
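
The routes added to the README above can be exercised with any HTTP client. Below is a minimal TypeScript client sketch, not part of the commit: it assumes the service listens on http://localhost:3000 (the port mapped in docker-compose), that the crawl endpoint is `POST /crawl` as shown in the controller diff, and that responses are JSON; the helper names are hypothetical.

```typescript
// Minimal client sketch (assumption: Node 18+ with a global fetch).
const BASE_URL = 'http://localhost:3000'; // assumed from the docker-compose port mapping

// Ask the crawler to process a URL (POST /crawl, per the controller diff).
async function startCrawl(url: string): Promise<unknown> {
  const res = await fetch(`${BASE_URL}/crawl`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url }),
  });
  return res.json();
}

// List all crawled sites (GET /sites).
async function listSites(): Promise<unknown> {
  const res = await fetch(`${BASE_URL}/sites`);
  return res.json();
}

// Delete a site by its ID (DELETE /sites/:id).
async function deleteSite(id: string): Promise<unknown> {
  const res = await fetch(`${BASE_URL}/sites/${id}`, { method: 'DELETE' });
  return res.json();
}

startCrawl('https://www.example.com')
  .then(() => listSites())
  .then((sites) => console.log(sites))
  .catch(console.error);
```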

@@ -5,17 +5,17 @@ services:
     ports:
       - '3000:3000'
     depends_on:
-      - db
+      - mongo
     environment:
-      - MONGO_URL=mongodb://db:27017/nestjs
+      - MONGO_URL=mongodb://mongo:27017/${DB_NAME}
     networks:
      - appnet
-  db:
+  mongo:
     image: mongo
     environment:
-      - MONGO_INITDB_DATABASE=nestjs
-      - MONGO_INITDB_ROOT_USERNAME=admin
-      - MONGO_INITDB_ROOT_PASSWORD=adminpassword
+      - MONGO_INITDB_DATABASE=${DB_NAME}
+      - MONGO_INITDB_ROOT_USERNAME=${DB_USER}
+      - MONGO_INITDB_ROOT_PASSWORD=${DB_PASSWORD}
     volumes:
       - dbdata:/data/db
     networks:
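
The compose change above replaces the hard-coded database settings with `${DB_NAME}`, `${DB_USER}`, and `${DB_PASSWORD}`, which Docker Compose substitutes from the shell environment or a `.env` file next to the compose file. On the application side, a NestJS root module could consume the injected `MONGO_URL` roughly as follows; this is a sketch, not code from the commit, and assumes `@nestjs/mongoose` is in use (suggested by the `@InjectModel`/`Model<Site>` usage in the controller diff):

```typescript
// Hypothetical AppModule sketch: reads the MONGO_URL that docker-compose injects,
// falling back to a local default for development outside the containers.
import { Module } from '@nestjs/common';
import { MongooseModule } from '@nestjs/mongoose';

@Module({
  imports: [
    MongooseModule.forRoot(
      process.env.MONGO_URL ?? 'mongodb://localhost:27017/nestjs',
    ),
  ],
})
export class AppModule {}
```

Note that if MongoDB root credentials are enabled as in the compose file, the connection URL would also need to carry `${DB_USER}`/`${DB_PASSWORD}` and an `authSource` parameter.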

@@ -6,7 +6,9 @@ import { Site } from 'src/interfaces/site.interface';
 @Controller('/')
 export class ApiController {
   constructor(private crawlerService: CrawlerService, @InjectModel('Site') private readonly siteModel: Model<Site>) {}
   @Post('crawl')
   async crawl(@Body() body: { url: string }) {
@@ -63,6 +65,4 @@ export class ApiController {
     return site || {};
   }
 }
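
For context, a GET handler built on the injected `siteModel` could look roughly like this; the class and method names are hypothetical, and only the injection pattern and the `/sites/:id` route mirror the diff and README above:

```typescript
// Hypothetical read-only controller sketch using the injected Mongoose model.
import { Controller, Get, Param } from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { Model } from 'mongoose';
import { Site } from 'src/interfaces/site.interface';

@Controller('/')
export class SitesReadController {
  constructor(@InjectModel('Site') private readonly siteModel: Model<Site>) {}

  // Return the site document for the given ID, or an empty object if none exists.
  @Get('sites/:id')
  async getSite(@Param('id') id: string): Promise<Site | {}> {
    const site = await this.siteModel.findById(id).exec();
    return site ?? {};
  }
}
```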

@@ -44,7 +44,6 @@ export class CrawlerService {
     // SCREENSHOT //
     const screenshotBuffer: Buffer = await page.screenshot({ fullPage: true });
     await new Promise((resolve, reject) => {
       fs.writeFile(`${directory}screenshot.png`, screenshotBuffer, (err) => {
         if (err) {
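
The screenshot write above wraps callback-style `fs.writeFile` in a `new Promise`; an equivalent, slightly tighter form could use `fs/promises`. This is a sketch of that alternative, not the code in the commit; the `directory` value is assumed to already end with a path separator, as in the template string in the diff:

```typescript
// Sketch: persist a full-page screenshot using fs/promises instead of a wrapped callback.
import { promises as fs } from 'fs';
import type { Page } from 'puppeteer';

// Hypothetical helper mirroring the SCREENSHOT step in CrawlerService.
async function saveScreenshot(page: Page, directory: string): Promise<void> {
  const screenshotBuffer = await page.screenshot({ fullPage: true });
  // `directory` is assumed to end with a trailing separator, e.g. 'crawler_assests/www.example.com/'.
  await fs.writeFile(`${directory}screenshot.png`, screenshotBuffer);
}
```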