Working version; still bugs with Mongo auth. Changing the mongo image version to 5.0.15.
parent 75c4f55c01
commit 43b598689c
7 changed files with 19 additions and 18 deletions
.dockerignore (new file, 1 addition)
@@ -0,0 +1 @@
+node_modules
Dockerfile (18 changed lines)
@@ -1,15 +1,14 @@
-FROM ubuntu:20.04 AS base
+FROM node:14-alpine AS base
 
 # Set non-interactive mode
-ENV DEBIAN_FRONTEND noninteractive
+ENV DEBIAN_FRONTEND=noninteractive
 ENV NODE_ENV=production
 
-# Install required packages
-RUN apt-get update && \
-    curl -sL https://deb.nodesource.com/setup_14.x | bash - && \
-    apt-get install -y nodejs chromium-browser
+# Install chromium
+RUN apk add --no-cache chromium chromium-chromedriver
 
-RUN apt-get install -y npm
+# Create a symbolic link for google-chrome
+RUN ln -s /usr/bin/chromium-browser /usr/bin/google-chrome
 
 # Set the working directory
 WORKDIR /app
@@ -18,10 +17,11 @@ WORKDIR /app
 COPY package*.json ./
 
 # Install dependencies
-RUN npm install
+RUN npm install -g npm@9.6.3 && \
+    npm install
 
 # Copy the source code
 COPY . .
 
 # Start the application
 CMD ["npm", "run", "start"]
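Context for the base-image switch (an inference from the diff, not something the commit states): puppeteer's bundled Chromium is built against glibc and does not run on Alpine's musl libc, so the image installs the distro chromium package and the crawler is pointed at it through executablePath further down. Unless the build also tells puppeteer to skip its own browser download at npm install time (puppeteer supports this via an environment variable such as PUPPETEER_SKIP_CHROMIUM_DOWNLOAD, depending on the puppeteer version), the install step will still fetch an unusable Chromium into the image.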
@@ -7,15 +7,15 @@ services:
     depends_on:
       - mongo
     environment:
-      - MONGO_URL=mongodb://mongo:27017/${DB_NAME}
+      - MONGO_URL=mongodb://mongo:27017/mydatabase
     networks:
       - appnet
   mongo:
-    image: mongo
+    image: mongo:5.0.15
     environment:
-      - MONGO_INITDB_DATABASE=${DB_NAME}
-      - MONGO_INITDB_ROOT_USERNAME=${DB_USER}
-      - MONGO_INITDB_ROOT_PASSWORD=${DB_PASSWORD}
+      - MONGO_INITDB_DATABASE=mydatabase
+      - MONGO_INITDB_ROOT_USERNAME=akamai
+      - MONGO_INITDB_ROOT_PASSWORD=password
     volumes:
       - dbdata:/data/db
     networks:
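The commit message says auth against Mongo is still broken. Judging only from this compose file, a plausible cause: when MONGO_INITDB_ROOT_USERNAME and MONGO_INITDB_ROOT_PASSWORD are set, the official mongo image creates that user in the admin database and starts mongod with authentication enabled, while the app's MONGO_URL above carries no credentials, so its connections arrive unauthenticated. A minimal sketch of a URL that would match the credentials declared here, shown as a TypeScript constant for illustration only, not as the committed configuration:

// Sketch: user and password must match MONGO_INITDB_ROOT_USERNAME / _PASSWORD above;
// authSource=admin because the init scripts create the root user in the admin database.
const MONGO_URL =
  'mongodb://akamai:password@mongo:27017/mydatabase?authSource=admin';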
@@ -29,10 +29,10 @@
     "mongoose": "^7.0.4",
     "puppeteer": "^19.9.1",
     "reflect-metadata": "^0.1.13",
+    "@nestjs/cli": "^9.0.0",
     "rxjs": "^7.5.5"
   },
   "devDependencies": {
-    "@nestjs/cli": "^9.0.0",
     "@nestjs/schematics": "^9.0.0",
     "@nestjs/testing": "^9.0.0",
     "@types/express": "^4.17.13",
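Moving @nestjs/cli out of devDependencies lines up with the Dockerfile setting NODE_ENV=production: in that mode npm install omits devDependencies, and if the start script runs nest start (the usual NestJS default; the script itself is not visible in this diff), the CLI has to be present in the production install.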
@@ -19,7 +19,7 @@ export class ApiController {
       newSite.save().then((result) => {
         console.log("Site saved !", result);
       }).catch((err) => {
-        console.log("Error saving site !", err);
+        console.log("Error saving site !", err.message);
       });
     }).catch((err) => {
       console.log("** Error crawling ! **", err);
@@ -8,7 +8,7 @@ dotenv.config();
 
 @Module({
   imports: [
-    MongooseModule.forRoot(process.env.DB_URI, {
+    MongooseModule.forRoot(process.env.MONGO_URL, {
       useNewUrlParser: true,
       useUnifiedTopology: true,
     }),
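Since the module now reads MONGO_URL directly, one alternative to baking credentials into that URL is passing them as Mongoose connection options. A rough sketch, reusing the DB_USER and DB_PASSWORD names the old compose file exported; this is an illustration, not code from the repository:

// Sketch: credentials as connection options instead of inside MONGO_URL.
MongooseModule.forRoot(process.env.MONGO_URL, {
  user: process.env.DB_USER,
  pass: process.env.DB_PASSWORD,
  authSource: 'admin',
}),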
@@ -18,7 +18,7 @@ export class CrawlerService {
   async crawl(url: string): Promise<Site> {
 
     console.log("start crawl website", url);
-    const browser = await puppeteer.launch({ headless: true, args: ['--no-sandbox'] });
+    const browser = await puppeteer.launch({ executablePath: '/usr/bin/chromium-browser', headless: true, args: ['--no-sandbox'] });
     const page = await browser.newPage();
     const domain = this.extractDomain(url);
     await page.goto(url);
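Hard-coding /usr/bin/chromium-browser couples the service to this specific image layout. A small variant sketch that keeps that path only as a fallback; PUPPETEER_EXECUTABLE_PATH is puppeteer's conventional override variable, and nothing else here comes from the repository:

// Sketch: allow the browser path to be overridden per environment,
// falling back to the path the Dockerfile symlink provides.
const browser = await puppeteer.launch({
  executablePath: process.env.PUPPETEER_EXECUTABLE_PATH ?? '/usr/bin/chromium-browser',
  headless: true,
  args: ['--no-sandbox'],
});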