This is a simple web scraper.
Node.js sends an HTTP request and responds to the client with the fetched HTML. The console shows body.toString() immediately, but in the browser the page loads forever; when I stop the server, the page is displayed.
I did a similar thing in Express and it just works, but for some reason I am not allowed to use Express, so I have to write it in pure Node.js.
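Roughly, the Express version I am comparing against looked like the following (a minimal sketch from memory; the route and target host mirror the code further down, everything else is assumed):

// Minimal sketch of the Express version that worked (details assumed).
const express = require("express");
const http = require("http");

const app = express();

app.get("/", (request, response) => {
  // Same outgoing request, but Express finishes the response
  // itself once response.send() is called.
  http.get({ hostname: "something.com", path: "/" }, (res) => {
    let chunks = [];
    res.on("data", (chunk) => chunks.push(chunk));
    res.on("end", () => response.send(Buffer.concat(chunks).toString()));
  });
});

app.listen(3000, () => console.log("express server listening on 3000"));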
I have also tried doing it without the outgoing HTTP request, leaving only response.write and response.end inside the if statement. That works fine when I put a few short strings in response.write, but when I put minified HTML inside response.write the same thing happens: it loads forever and only displays the page when I stop the server.
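That stripped-down variant was roughly the following sketch, with PAGE_HTML standing in for the minified HTML string (the name and the HTML itself are placeholders):

// Sketch of the variant without the outgoing HTTP request (names assumed).
const http = require("http");
const port = 3000;

// Stand-in for the minified HTML that triggers the endless loading.
const PAGE_HTML = "<!DOCTYPE html><html><head><title>test</title></head><body>...</body></html>";

const requestHandler = (request, response) => {
  if (request.url === "/") {
    response.write(PAGE_HTML); // fine with short strings, hangs with the real minified HTML
    response.end();
  }
};

http.createServer(requestHandler).listen(port, () => {
  console.log(`server is listening on ${port}`);
});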
The following is my code; any help is appreciated. Thanks in advance.
const url = require("url"),
      http = require('http');
const port = 3000;

const requestHandler = (request, response) => {
  if (request.url === "/") {
    // Options for the outgoing request to the site being scraped
    const options = {
      "method": "GET",
      "hostname": "something.com",
      "port": null,
      "path": "/",
      "headers": {
        "content-type": "multipart/form-data; boundary=----WebKitFormBoundary7MA4YWxkTrZu0gW",
      }
    };

    const req = http.request(options, function (res) {
      let chunks = [];
      res.on("data", function (chunk) {
        chunks.push(chunk);
      });
      res.on("end", function () {
        let body = Buffer.concat(chunks);
        console.log(body.toString());   // this prints immediately
        const html = body.toString();
        response.write(html);           // send the scraped HTML to the browser
        response.end();
      });
    });

    req.write("");
    req.end();
  }
  // any other URL (e.g. /favicon.ico) falls through without a response
};

const server = http.createServer(requestHandler);

server.listen(port, (err) => {
  if (err) {
    return console.log('something bad happened', err);
  }
  console.log(`server is listening on ${port}`);
});
via user3173952