adding API usage to README

parent 15c718dd0a
commit c57a13d8d8

5 changed files with 28 additions and 11 deletions
Dockerfile

@@ -9,6 +9,8 @@ RUN apt-get update && \
     curl -sL https://deb.nodesource.com/setup_14.x | bash - && \
     apt-get install -y nodejs chromium-browser
 
+RUN apt-get install -y npm
+
 # Set the working directory
 WORKDIR /app
 
@@ -22,4 +24,4 @@ RUN npm install
 COPY . .
 
 # Start the application
-CMD ["npm", "run", "start:prod"]
+CMD ["npm", "run", "start:dev"]
README.md
@@ -18,3 +18,19 @@ Post a JSON object to the crawler with the following format:
 
 The crawler will then crawl the given url and store the results in a database and assets in a file system
 `crawler_assests/www.example.com/`.
+
+
+# API #
+
+The API is a simple API that serves the results of the crawler.
+
+# Routes #
+## GET ##
+/sites - Returns a list of all sites
+/sites/:id - Returns the site object for the given site Id
+sites/domain/:domain - Returns the domain object for the given domain
+## DELETE ##
+/sites/:id - Deletes the site object for the given site Id
+sites/domain/:domain - Deletes the domain object for the given domain
+## Post ##
+sites/:id - Updates the site object for the given site Id
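The routes above can be exercised from any HTTP client. A minimal TypeScript sketch, assuming the API is reachable at http://localhost:3000 (the port mapped in docker-compose.yml) and a global `fetch` is available (Node 18+); the base URL and the site id are placeholders, not part of the commit:

```ts
// Sketch of calling the documented GET and DELETE routes.
const base = 'http://localhost:3000';

async function listSites(): Promise<unknown> {
  const res = await fetch(`${base}/sites`);
  return res.json(); // list of all stored site objects
}

async function deleteSite(id: string): Promise<void> {
  // DELETE /sites/:id removes the site object for the given site Id.
  await fetch(`${base}/sites/${id}`, { method: 'DELETE' });
}

listSites().then((sites) => console.log(sites));
```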
docker-compose.yml

@@ -5,17 +5,17 @@ services:
     ports:
       - '3000:3000'
     depends_on:
-      - db
+      - mongo
     environment:
-      - MONGO_URL=mongodb://db:27017/nestjs
+      - MONGO_URL=mongodb://mongo:27017/${DB_NAME}
     networks:
       - appnet
-  db:
+  mongo:
     image: mongo
     environment:
-      - MONGO_INITDB_DATABASE=nestjs
-      - MONGO_INITDB_ROOT_USERNAME=admin
-      - MONGO_INITDB_ROOT_PASSWORD=adminpassword
+      - MONGO_INITDB_DATABASE=${DB_NAME}
+      - MONGO_INITDB_ROOT_USERNAME=${DB_USER}
+      - MONGO_INITDB_ROOT_PASSWORD=${DB_PASSWORD}
     volumes:
       - dbdata:/data/db
     networks:
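With the hard-coded database name and credentials replaced by `${DB_NAME}`, `${DB_USER}`, and `${DB_PASSWORD}`, compose now expects those variables from the shell or an `.env` file next to docker-compose.yml. On the application side, a minimal sketch of consuming the injected `MONGO_URL`, assuming the usual `@nestjs/mongoose` setup (consistent with the `@InjectModel` usage in the controller below; the fallback URI is illustrative):

```ts
// Sketch: reading MONGO_URL in the NestJS root module. The fallback
// value is an assumption for local runs outside of compose.
import { Module } from '@nestjs/common';
import { MongooseModule } from '@nestjs/mongoose';

@Module({
  imports: [
    MongooseModule.forRoot(
      process.env.MONGO_URL ?? 'mongodb://localhost:27017/dev',
    ),
  ],
})
export class AppModule {}
```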
api.controller.ts

@@ -6,7 +6,9 @@ import { Site } from 'src/interfaces/site.interface';
 
 @Controller('/')
 export class ApiController {
+
+  constructor(private crawlerService: CrawlerService, @InjectModel('Site') private readonly siteModel: Model<Site>) {}
 
   @Post('crawl')
   async crawl(@Body() body: { url: string }) {
@@ -63,6 +65,4 @@ export class ApiController {
     return site || {};
   }
-
-
 
 }
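The handler signature shows that `POST /crawl` takes a JSON body with a single `url` field. A hedged usage sketch (the host and the crawled URL are placeholders, not part of the commit):

```ts
// Sketch: triggering a crawl through the POST /crawl route. The body
// shape mirrors the handler's { url: string } parameter.
async function startCrawl(url: string): Promise<unknown> {
  const res = await fetch('http://localhost:3000/crawl', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url }),
  });
  return res.json();
}

startCrawl('https://www.example.com').then(console.log);
```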
crawler.service.ts

@@ -44,7 +44,6 @@ export class CrawlerService {
 
     // SCREENSHOT //
     const screenshotBuffer: Buffer = await page.screenshot({ fullPage: true });
-
     await new Promise((resolve, reject) => {
       fs.writeFile(`${directory}screenshot.png`, screenshotBuffer, (err) => {
         if (err) {
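The hand-rolled Promise around `fs.writeFile` works, but Node's promise-based file API expresses the same step more compactly; a possible alternative sketch, reusing the `directory` and `screenshotBuffer` values from the hunk above:

```ts
// Same write as above via fs/promises; this would run inside the
// async crawl method, so await is available.
import { writeFile } from 'node:fs/promises';

await writeFile(`${directory}screenshot.png`, screenshotBuffer);
```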