added logs

Kfir Dayan 2023-05-31 13:27:00 +03:00
parent 9f23246458
commit a0b79f2287

@@ -5,6 +5,7 @@
 use App\Models\WebCrawl;
 use GuzzleHttp\Client;
 use Illuminate\Http\Request;
+use Illuminate\Support\Facades\Log;
 
 class WebCrawlController extends Controller
 {
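The new import pulls in Laravel's `Log` facade, which proxies to whatever logger is configured in `config/logging.php` (the `stack` channel by default, writing to `storage/logs/laravel.log`). A minimal sketch of the two calls this commit relies on (the URL is an illustrative placeholder):

```php
use Illuminate\Support\Facades\Log;

// Both methods take a message plus an optional context array.
Log::info('Crawl started', ['url' => 'https://example.com']); // informational
Log::error('Crawl failed');                                   // error severity
```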
@@ -25,6 +26,7 @@ public function crawlWebsite(Request $request)
         // Check if the URL is already in the database
         $webCrawl = WebCrawl::where('url', $url)->first();
         if ($webCrawl && !$refresh) {
+            Log::error("This URL already exists in the database $url");
             return response()->json([
                 'error' => 'This URL already exists in the database',
             ], 400);
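The commit interpolates `$url` directly into the message string. An alternative sketch, not what this commit does, uses Laravel's context array instead, which keeps the message text constant and easier to search:

```php
// Alternative sketch: pass the URL as structured context.
Log::error('This URL already exists in the database', ['url' => $url]);
```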
@@ -42,6 +44,7 @@ public function crawlWebsite(Request $request)
         try {
             $crawler->save();
         } catch (\Exception $e) {
+            Log::error($e->getMessage());
             return response()->json([
                 'error' => 'Failed to save the URL to the database',
             ], 500);
@@ -57,11 +60,13 @@ public function crawlWebsite(Request $request)
                 ]);
             }
         } else {
+            Log::error("Failed to retrieve the URL $url");
             return response()->json([
                 'error' => 'Failed to retrieve the URL',
             ], 500);
         }
 
+        Log::info("Crawling completed successfully For URL $url");
         return response()->json([
             'message' => 'Crawling completed successfully',
         ]);
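Assuming the default `stack`/`single` channel, the new `Log::info` call at the end of `crawlWebsite` produces a line like the following in `storage/logs/laravel.log` (timestamp and environment are illustrative):

```
[2023-05-31 13:27:00] local.INFO: Crawling completed successfully For URL https://example.com
```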
@@ -84,7 +89,9 @@ protected function crawlWebsiteRecursive($url, $depth)
         $linksFromPage = $this->getLinksFromPage($crawler->content);
         try {
             $crawler->save();
+            Log::info("URL saved to the database $url");
         } catch (\Exception $e) {
+            Log::error("Can't save the URL to the database $url");
             return [];
         }
         if ($depth > 0 && count($linksFromPage) > 0) {
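Unlike the `catch` block in `crawlWebsite`, this one discards the exception and logs only a fixed string. A sketch of a variant that also keeps the failure cause (an assumption about intent, not part of the commit):

```php
try {
    $crawler->save();
} catch (\Exception $e) {
    // Include the exception text so the root cause is not lost.
    Log::error("Can't save the URL to the database $url: {$e->getMessage()}");
    return [];
}
```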
@@ -124,10 +131,12 @@ public function destroy($id)
         $webCrawl = WebCrawl::find($id);
         if ($webCrawl) {
             $webCrawl->delete();
+            Log::info("Web crawl deleted successfully For ID $id");
             return response()->json([
                 'message' => 'Web crawl deleted successfully',
             ]);
         }
+        Log::error("Web crawl not found For ID $id");
         return response()->json([
             'error' => 'Web crawl not found',
         ], 404);
@@ -136,6 +145,7 @@ public function destroy($id)
     public function destroyAll()
     {
         WebCrawl::truncate();
+        Log::info("All web crawls deleted successfully");
         return response()->json([
             'message' => 'All web crawls deleted successfully',
         ]);
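A possible follow-up, not part of this commit: routing crawl logs to their own channel so they do not mix with the main application log. This assumes a hypothetical `crawler` channel added to `config/logging.php`:

```php
// config/logging.php — hypothetical channel, not in this repository
'channels' => [
    // ...
    'crawler' => [
        'driver' => 'single',
        'path' => storage_path('logs/crawler.log'),
        'level' => 'info',
    ],
],
```

Calls would then use `Log::channel('crawler')->info(...)` instead of the bare facade.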