How to send data from NodeJS server side to the JS client side, only when data is ready? - javascript

On my website, when the user clicks a button, some of the user's data is stored in a database, and after that I want the server to send notification data to the JavaScript frontend file so it can change the UI.
Right now, the JS file (index.js) receives the data right after the website loads (always false). I want it to be received only when the data is ready on the server.
I searched a lot but couldn't find an answer to my problem.
I appreciate any help :)
server.js
var requestValidation = false;

app.post("/", function(req, res){
    var name = req.body.personName;
    var email = req.body.personEmail;
    var collabTopic = req.body.collabTopic;

    const newUser = new User({ // mongoDB schema
        name: name,
        email: email,
        collabTopic: collabTopic
    });

    newUser.save(function(err){ // adding data to mongoDB
        if(!err){
            requestValidation = true;
        }
    });
});

app.get("/succ", function(req, res){
    res.json(requestValidation);
});
index.js
const url = "http://localhost:3000/succ";
const getData = async (url) => {
    try {
        const response = await fetch(url);
        const json = await response.json();
        console.log(json);
    } catch (error) {
        console.log(error);
    }
};
getData(url);

I'm not sure this is completely the answer you're looking for, but it's definitely a tool/feature to consider as you rework your approach.
app.post("/", async (req, res) => {
let result = await INSERT MONGODB UPDATE OR INSERT FUNCTION;
res.render("YOUR TEMPLATE", result);
});
You probably can't plug and play this, but when you finish a MongoDB operation, it returns a JSON object with some details on whether or not it succeeded. For example, a MongoDB insert operation returns something like this (stored in the result variable I created above):
{ "acknowledged" : true, "insertedId" : ObjectId("5fd989674e6b9ceb8665c57d") }
and then you can pass this value on as you wish.
Edit: This is what tkausl referred to in a comment.
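Applied to the original server.js, that approach could look something like this (a minimal sketch reusing the User model and save callback from the question; the saved flag is just an illustrative name). The idea is to respond from the POST handler itself once the save has finished, instead of polling a separate /succ route with a shared flag:
app.post("/", function(req, res){
    const newUser = new User({            // mongoDB schema from the question
        name: req.body.personName,
        email: req.body.personEmail,
        collabTopic: req.body.collabTopic
    });

    newUser.save(function(err){           // adding data to mongoDB
        if(err){
            return res.status(500).json({ saved: false });
        }
        res.json({ saved: true });        // sent only after the data is stored
    });
});
On the client, the fetch call would then target this POST route (or read the response of the existing form submission) and check the saved flag, so the UI only changes once the write has actually happened.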

Here is an example if you want to pass the content of a txt file to the client with express and jquery:
in express:
app.get('/get', (req, res) => {
    fs.readFile('test.txt', (err, data) => {
        if (err) throw err;
        return res.json(JSON.parse(data));
    })
})
jQuery on the client side:
$.getJSON("http://localhost:3000/get", function(data) {
    geojsondata1 = JSON.stringify(data);
});
Now you can do anything you want with the variable data.
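If you would rather avoid jQuery, the same request can be made with fetch on the client (a small sketch against the same /get route as above):
fetch("http://localhost:3000/get")
    .then(response => response.json())
    .then(data => {
        const geojsondata1 = JSON.stringify(data);  // same variable idea as the jQuery version
        console.log(geojsondata1);
    })
    .catch(error => console.log(error));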

Related

Mongodb returns an empty array while retrieving data through nodejs

let mongodb = require('mongodb').MongoClient;
let express = require("express")
let app = express()
let connectionString = 'mongodb://ToDoAppUser:ToDoAppUserPassword#ac-u9kgapm-shard-00-00.8rdkdoi.mongodb.net:27017,ac-u9kgapm-shard-00-01.8rdkdoi.mongodb.net:27017,ac-u9kgapm-shard-00-02.8rdkdoi.mongodb.net:27017/?ssl=true&replicaSet=atlas-68qno6-shard-0&authSource=admin&retryWrites=true&w=majority'
let db
mongodb.connect(connectionString, function(err, client){
    if (err) throw err
    db = client.db()
    app.listen(3000)
    console.log("Database connected.");
})
app.use(express.urlencoded({extended : false}))
Trying to retrieve data from MongoDB
As you can see, I am trying to retrieve data from the MongoDB collection named 'items' and print it, but it returns an empty array. I am stuck on this; kindly help me resolve the issue.
app.get("/", function(req, res){
**// this collectio method of mongodb returns empty array.
// however, mongodb got connected, but i cannot retreive data from mongodb**
db.collection('items').find().toArray(function(err, items) {
if(err) console.log(err)
console.log(items)
})
You need to use the following format.
async function findOneListingByName(client, nameOfListing) {
    const result = await client.db("sample_airbnb").collection("listingsAndReviews").findOne({ name: nameOfListing });

    if (result) {
        console.log(`Found a listing in the collection with the name '${nameOfListing}':`);
        console.log(result);
    } else {
        console.log(`No listings found with the name '${nameOfListing}'`);
    }
}
The code above worked for me.
By the way, you can read their documentation here for more examples:
https://www.mongodb.com/developer/languages/javascript/node-crud-tutorial/
My guess is that the call to fetch items from the DB is asynchronous, and you're trying to use items in a synchronous manner.
Try adding async to the controller function and using await for the DB request. Like this:
app.get("/", async function(req, res){
/* Mongo documents don't show any parameters for the toArray method
* Read here https://www.mongodb.com/docs/manual/reference/method/cursor.toArray/#mongodb-method-cursor.toArray
*
*/
const itemsFromDB = await db.collection('items').find().toArray()
conssole.log('items are:' itemsFromDB )
})

retrieve data from forEach and send to front end node.js

I have this function:
app.get('/dashboard', async (req, res) => {
    const customers = await stripe.customers.list();

    customers.data.forEach(customer => {
        console.log(customer.id);
    });

    // res.render('dashboard.ejs', {customer: customers})
})
What it does is return all the customers from my Stripe DB. However, now I want to pass the data returned from the loop (which is all of it) to the front end and access it there. How can I do this?
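One possible approach (a sketch only, reusing the commented-out dashboard.ejs idea; customers is the same Stripe list from above): skip the loop and hand the whole customers.data array either to a template or to the client as JSON:
app.get('/dashboard', async (req, res) => {
    const customers = await stripe.customers.list();

    // Option 1: render a server-side template with the data
    // res.render('dashboard.ejs', { customers: customers.data });

    // Option 2: return JSON so the front end can fetch() it and build the UI itself
    res.json(customers.data);
})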

Can't write file using FS

So, I have a NodeJS API using express and I want to get data from a HTTP request and store it in a local JSON file. This is the file that contains the function to write the local file:
const fs = require('fs').promises;

async function updateData(req, res){
    let { name, phone, email, addres, whatsapp, officeHours } = req.body;

    let configs = {
        name: name,
        phone: phone,
        email: email,
        addres: addres,
        whatsapp: whatsapp,
        officeHours: officeHours
    };

    let data = JSON.stringify(configs);
    console.log(data);

    try{
        await fs.writeFile('./configs.json', data);
    }catch(err){
        throw err;
    }
}

module.exports = {updateData};
And this is the part of the code where I call the function from the POST route:
routes.post('/storedata', storeData.updateData);
When I send the HTTP request, using Insomnia, I don't get any errors and the data is logged in the console, but the file isn't written.
Try an absolute path like writeFile( __dirname + '/configs.json', data)
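A minimal sketch of what that could look like inside updateData (path.join is just one way to build the absolute path; sending a response at the end is optional, but it stops the client from hanging):
const fs = require('fs').promises;
const path = require('path');

async function updateData(req, res){
    let data = JSON.stringify(req.body);
    try{
        // write next to this file instead of relative to the process's working directory
        await fs.writeFile(path.join(__dirname, 'configs.json'), data);
        res.json({ saved: true });
    }catch(err){
        res.status(500).json({ saved: false });
    }
}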

SQL to MongoDB Migration using nodejs script

I'm new to Node.js and I'm currently doing a SQL to MongoDB migration. I have created a script to load data into MongoDB from SQL queries. I built it from sample code found on Google and it is working, but I'm facing the issue below and need a workaround.
I have an array of SQL queries, and I don't want to run any of them if any query has a syntax issue or any error in its result. (Say the second query has a syntax issue: then the data from the first query should not be loaded into Mongo either, which is what currently happens in my case.) Basically, if any of the queries has an issue, none of the results should be loaded into the Mongo collection. Likewise, if there is any issue on the Mongo side, the transaction should not be committed.
I have used Mongo transactions here to roll back the data on errors. Please find the code below; any help would be much appreciated. The SQL and Mongo credentials are mock data only.
config file code
var mongoCollection = 'collectionName';
exports.mongoCollection = mongoCollection;

var queryList = [
    'sample query one',
    'sample query two'
];
exports.queryList = queryList;
main script code
var MongoClient = require('mongodb').MongoClient;
var sql = require('mysql');
const config = require('./assets/config');

var sqlConfig = {
    user: 'username',
    password: 'password',
    server: 'servername',
    database: 'databasename',
    port: 'portname',
    multipleStatements: true
};

async function transaction() {
    const mongodbUrl = 'mongourl';
    const client = await MongoClient.connect(mongodbUrl, {useNewUrlParser: true}, {useUnifiedTopology: true});
    const db = client.db();

    config.queryList.forEach(query => {
        new sql.ConnectionPool(sqlConfig).connect().then(pool => {
            return pool.request().query(query)
        }).then(result => {
            (async () => {
                const session = client.startSession();
                session.startTransaction({
                    readConcern: {level: 'snapshot'},
                    writeConcern: {w: 'majority'}
                });
                try {
                    const collection = client.db('mongodbName').collection(config.mongoCollection);
                    await collection.insertMany(result.recordset, {session});
                    await session.commitTransaction();
                    session.endSession();
                    console.log('transaction completed');
                } catch(error) {
                    await session.abortTransaction();
                    session.endSession();
                    console.log('transaction aborted');
                    throw error;
                }
            })();
            sql.close();
        }).catch(error => {
            sql.close();
            throw error;
        })
    });
};
transaction();
Depending on the volume of data, you might look at breaking the process into two parts:
1. Get the data from MySQL
2. If no errors, load it into Mongo
That would save you having to roll back the Mongo writes.
You can also take advantage of the default Mongo pool size (5) and use a pool on the MySQL side too.
Currently, this code is creating a pool for every select, which isn't optimal:
config.queryList.forEach(query => {
    new sql.ConnectionPool(sqlConfig).connect().then(pool => { // <- New pool per query?
        return pool.request().query(query)
    })
})
Instead, you can set up a pool once, per the MySQL documentation.
It looks like that driver only has a callback API, but you can promisify the query to make it easier to work with.
So to put it all together, you could try something like this (this isn't working/tested code, just a suggestion):
var MongoClient = require('mongodb').MongoClient;
var sql = require('mysql');
const config = require('./assets/config');

var pool = sql.createPool({
    connectionLimit : 5,
    host            : 'servername',
    user            : 'username',
    password        : 'password',
    database        : 'databasename'
});

async function transaction() {
    try {
        const mongodbUrl = 'mongourl';
        const client = await MongoClient.connect(mongodbUrl, {useNewUrlParser: true}, {useUnifiedTopology: true});
        const db = client.db();
        const collection = client.db('mongodbName').collection(config.mongoCollection);

        // Map your query list to an array of runSql promises;
        // this will complete when all queries return, and jump to the catch if any fail
        let results = await Promise.all(config.queryList.map(runSql))

        // Map the results (each one an array of rows from runSql) to an array of mongo inserts
        let inserts = await Promise.all(results.map(r => collection.insertMany(r)))

        // Close all connections
        pool.end((err) => err ? console.error(err) : console.log('MySQL Closed'))
        client.close((err) => err ? console.error(err) : console.log('MongoDB Closed'))
    }
    catch(err){
        console.error(err)
    }
};

transaction();

function runSql(queryStr){
    return new Promise((resolve, reject) => {
        pool.query(queryStr, function (error, results, fields){
            error ? reject(error) : resolve(results)
        })
    })
}
If data volume is a concern, you might want to look at getting streams from your mySql selects instead of just running them
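As a rough illustration of that streaming idea (a sketch only, built on the row events and pause()/resume() that the mysql package documents for streaming query rows; the batch size of 1000, the query string, and the collection variable from the code above are placeholders):
pool.getConnection((err, connection) => {
    if (err) return console.error(err);
    let batch = [];
    connection.query('sample query one')
        .on('error', err => { console.error(err); connection.release(); })
        .on('result', row => {
            batch.push(row);
            if (batch.length >= 1000) {              // flush in chunks so rows never pile up in memory
                connection.pause();                  // stop emitting rows while Mongo catches up
                collection.insertMany(batch.splice(0))
                    .then(() => connection.resume())
                    .catch(err => console.error(err));
            }
        })
        .on('end', () => {
            if (batch.length) collection.insertMany(batch.splice(0)).catch(err => console.error(err));
            connection.release();
        });
});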

Do you need to save a file locally before sending it to a mongo db?

I am learning how to upload images from my React website to my Mongo database through an express server. In every tutorial I have read, the author saves the file locally in the express server before sending it to the Mongo database. Is there a way to avoid having to store the file locally by keeping it in a local variable which is then uploaded to the database?
Here are the tutorials I am referring to:
https://www.positronx.io/react-file-upload-tutorial-with-node-express-and-multer/
https://medium.com/ecmastack/uploading-files-with-react-js-and-node-js-e7e6b707f4ef
Thank you for your help.
I guess the GridFS API would be helpful to you. It says:
you can .pipe() directly from file streams to MongoDB
Here is the sample example from the docs:
const assert = require('assert');
const fs = require('fs');
const mongodb = require('mongodb');

const uri = 'mongodb://localhost:27017';
const dbName = 'test';

mongodb.MongoClient.connect(uri, function(error, client) {
    assert.ifError(error);

    const db = client.db(dbName);
    var bucket = new mongodb.GridFSBucket(db);

    fs.createReadStream('./meistersinger.mp3').
        pipe(bucket.openUploadStream('meistersinger.mp3')).
        on('error', function(error) {
            assert.ifError(error);
        }).
        on('finish', function() {
            console.log('done!');
            process.exit(0);
        });
});
documentation link : https://mongodb.github.io/node-mongodb-native/3.0/tutorials/gridfs/streaming/
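For completeness, reading a stored file back out of GridFS works the same way in reverse; here is a small sketch using openDownloadStreamByName (assuming the same bucket variable as in the upload example above):
bucket.openDownloadStreamByName('meistersinger.mp3').
    pipe(fs.createWriteStream('./meistersinger-copy.mp3')).
    on('error', function(error) {
        assert.ifError(error);
    }).
    on('finish', function() {
        console.log('downloaded!');   // fires once the local copy has been fully written
        process.exit(0);
    });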
Hope this helps!
Yes, you do want to store your files locally. I used an NFS server (FreeNAS) and mounted it to that local folder.
So when I saved a file to that location, it was stored on the other NFS server. Then I sent that image location as a response back to React, which then saved that location in MongoDB.
Example uploads.js
router.post('/', auth, async (req, res) => {
    let CurrentDate = moment().unix();

    if (req.files.file === null) {
        return res.status(400).json({ msg: 'no file uploaded' });
    }

    let user = await User.findById(req.user.id).select('-password');
    let file = req.files.file;
    file.name = CurrentDate + user._id + '.jpg';

    file.mv(`./client/public/uploads/${file.name}`, (err) => {
        if (err) {
            console.error(err);
            return res.status(500).send(err);
        }
        res.json({ fileName: file.name, filePath: `/uploads/${file.name}` });
    });
});
This is what the MongoDB entry looks like:
image:"/uploads/15951066675f1365239d46882312332d20.jpg"
