Uploading multiple large JSON files in one go from React Native to Node.js - javascript

I am trying to upload multiple large JSON files from React Native to Node.js.
The files upload fine unless a file is larger in size, in which case it does not upload in one try.
I suspect that, since the upload code is in a for loop, the code starts one upload and, without waiting for that file to finish, starts uploading the next one.
Is there any way to ensure that each file gets uploaded in one go?
syncFunction() {
  var RNFS = require('react-native-fs');
  var path = RNFS.DocumentDirectoryPath + '/toBeSynced';
  RNFS.readDir(path)
    .then((success) => {
      for (let i = 0; i < success.length; i++) {
        var fileName = success[i].name
        var filePath = success[i].path
        var uploadUrl = 'http://192.168.1.15:3333/SurveyJsonFiles/GetFiles/'
        if (Platform.OS === 'android') {
          filePath = filePath.replace("file://", "")
        } else if (Platform.OS === 'ios') {
          filePath = filePath
        }
        const data = new FormData();
        data.append("files", {
          uri: filePath,
          type: 'multipart/form-data',
          name: fileName,
        });
        const config = {
          method: 'POST',
          headers: {
            'Accept': 'application/json',
          },
          body: data,
        };
        fetch(uploadUrl, config)
          .then((checkStatusAndGetJSONResponse) => {
            console.log(checkStatusAndGetJSONResponse);
            this.moveFile(filePath, fileName)
          }).catch((err) => {
            console.log(err)
          });
      }
    })
    .catch((err) => {
      console.log(err.message);
    });
}
The JSON files can be more than 50 MB depending on the data; since they contain base64 image data, the size grows as the user takes more photos.
The app creates new files whenever the user records any information. There is no error message displayed for a partial file upload.
The this.moveFile() method moves the synced files to another folder so that the same file does not get uploaded multiple times.
moveFile(oldpath, oldName) {
  var syncedPath = RNFS.DocumentDirectoryPath + '/syncedFiles'
  RNFS.mkdir(syncedPath)
  syncedPath = syncedPath + "/" + oldName
  RNFS.moveFile(oldpath, syncedPath)
    .then((success) => {
      console.log("files moved successfully")
    })
    .catch((err) => {
      console.log(err.message)
    });
}
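For what it's worth, if the concern is that the for loop fires all the uploads at once, a sequential version that awaits each request before starting the next one could look like the sketch below. It assumes the same react-native-fs, FormData, and fetch setup and the same endpoint as in the question:

async syncFunction() {
  const RNFS = require('react-native-fs');
  const path = RNFS.DocumentDirectoryPath + '/toBeSynced';
  const uploadUrl = 'http://192.168.1.15:3333/SurveyJsonFiles/GetFiles/';

  try {
    const files = await RNFS.readDir(path);
    for (const file of files) {
      let filePath = file.path;
      if (Platform.OS === 'android') {
        filePath = filePath.replace('file://', '');
      }

      const data = new FormData();
      data.append('files', {
        uri: filePath,
        type: 'multipart/form-data',
        name: file.name,
      });

      // Awaiting the fetch guarantees only one upload is in flight at a time.
      const response = await fetch(uploadUrl, {
        method: 'POST',
        headers: { Accept: 'application/json' },
        body: data,
      });
      console.log(response);

      // Move the file out of the sync folder once the request has completed.
      this.moveFile(filePath, file.name);
    }
  } catch (err) {
    console.log(err.message);
  }
}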

It turns out the fault was on the Node.js side: nodemon was restarting the server every time a new file landed in the uploads folder, so we simply moved the uploads folder outside the scope of the project.
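An alternative to moving the folder is to tell nodemon to ignore it. A minimal nodemon.json sketch, assuming the uploaded files land in an uploads/ directory inside the project (adjust the path to wherever your server writes them):

{
  "ignore": ["uploads/*"]
}

The same thing can be done on the command line with nodemon --ignore 'uploads/*'.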

Related

How to upload a file to a server in eel

I am using eel in Python to make a web application, and I want to upload an mp4 file from the client to the server and then download another file from the server.
I use this JS code to upload the file, but I think I must add the path of the folder to localhost, and I can't do this.
document.getElementById("uploadButton").onclick = () => {
  let fileElement = document.getElementById('fileInput')
  // check if user had selected a file
  if (fileElement.files.length === 0) {
    alert('please choose a file')
    return
  }
  let file = fileElement.files[0]
  let formData = new FormData();
  formData.set('file', file);
  axios.post("http://localhost:8001", formData, {
    onUploadProgress: progressEvent => {
      const percentCompleted = Math.round(
        (progressEvent.loaded * 100) / progressEvent.total
      );
      console.log(`upload process: ${percentCompleted}%`);
    }
  })
    .then(res => {
      console.log(res.data)
      console.log(res.data.url)
    })
}

Image upload functionality not working on deployed Heroku app but working on Localhost?

So I created my first big project: https://rate-n-write.herokuapp.com/
In brief, this is a blog app where the user can write reviews and publish them along with pictures.
I have used Firebase as the database to store the articles. The app works fine on localhost, but whenever I try to upload an image on Heroku I get an error.
The error is showing up in line number 8 of the following code (editor.js):
uploadInput.addEventListener('change', () => {
  uploadImage(uploadInput, "image");
})

const uploadImage = (uploadFile, uploadType) => {
  const [file] = uploadFile.files;
  if (file && file.type.includes("image")) {
    const formdata = new FormData();
    formdata.append('image', file);
    // Error shows up here in the fetch line
    fetch('/upload', {
      method: 'post',
      body: formdata
    }).then(res => res.json())
      .then(data => {
        if (uploadType == "image") {
          addImage(data, file.name);
        } else {
          bannerPath = `${location.origin}/${data}`;
          banner.style.backgroundImage = `url("${bannerPath}")`;
        }
      })
    const change_text = document.getElementById("uploadban");
    change_text.innerHTML = " ";
  } else {
    alert("upload Image only");
  }
}
This is just a snippet of the whole editor.js file.
Is it because I am trying to upload the file to the project directory? (server.js snippet below):
app.post('/upload', (req, res) => {
  let file = req.files.image;
  let date = new Date();
  // image name
  let imagename = date.getDate() + date.getTime() + file.name;
  // image upload path
  let path = 'public/uploads/' + imagename;
  // create upload
  file.mv(path, (err, result) => {
    if (err) {
      throw err;
    } else {
      // our image upload path
      res.json(`uploads/${imagename}`)
    }
  })
})
Do I need to use an online storage service like AWS S3?
Heroku is not suitable for persistent storage of data: its filesystem is ephemeral, so uploaded pictures are deleted after a while (whenever the dyno restarts).
I would suggest using a third-party object storage service such as Cloudinary or AWS S3.
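As a rough sketch of what that could look like on the server, here is the same /upload route writing to S3 instead of the local filesystem. It assumes the existing express-fileupload middleware (which is what provides req.files and file.mv) plus the aws-sdk v2 client; the bucket name and credentials setup are placeholders:

const AWS = require('aws-sdk');

// Credentials and region are assumed to come from environment variables or IAM.
const s3 = new AWS.S3();

app.post('/upload', (req, res) => {
  const file = req.files.image;              // provided by express-fileupload
  const date = new Date();
  const imagename = date.getDate() + date.getTime() + file.name;

  s3.upload(
    {
      Bucket: 'your-bucket-name',            // placeholder
      Key: `uploads/${imagename}`,
      Body: file.data,                       // file contents as a Buffer
      ContentType: file.mimetype,
    },
    (err, data) => {
      if (err) {
        return res.status(500).json({ error: 'upload failed' });
      }
      // data.Location is the URL of the uploaded object
      res.json(data.Location);
    }
  );
});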

How can I send an image from Node.js to React.js?

I have the following setup, by which I send the image, from its URL, to be edited and sent back to be uploaded to S3. The problem I currently have is that the image ends up corrupted on S3, and I am wondering if something in my code is causing the issue.
Server side:
function convertImage(inputStream) {
  return gm(inputStream)
    .contrast(-2)
    .stream();
}

app.get('/resize/:imgDetails', (req, res, next) => {
  let params = req.params.imgDetails.split('&');
  let fileName = params[0]; console.log(fileName);
  let tileType = params[1]; console.log(tileType);
  res.set('Content-Type', 'image/jpeg');
  let url = `https://${process.env.Bucket}.s3.amazonaws.com/images/${tileType}/${fileName}`;
  convertImage(request.get(url)).pipe(res);
})
Client side:
axios.get('/resize/' + fileName + '&' + tileType)
  .then(res => {
    /** PUT FILE ON AWS **/
    var img = res;
    axios.post("/sign_s3_sized", {
      fileName: fileName,
      tileType: tileType,
      ContentType: 'image/jpeg'
    })
      .then(response => {
        var returnData = response.data.data.returnData;
        var signedRequest = returnData.signedRequest;
        var url = returnData.url;
        this.setState({ url: url })
        // Put the fileType in the headers for the upload
        var options = {
          headers: {
            'Content-Type': 'image/jpeg'
          }
        };
        axios.put(signedRequest, img, options)
          .then(result => {
            this.setState({ success: true });
          }).bind(this)
          .catch(error => {
            console.log("ERROR: " + JSON.stringify(error));
          })
      })
      .catch(error => {
        console.log(JSON.stringify(error));
      })
  })
  .catch(error => console.log(error))
Before going any further, I can assure you that uploading images via this setup, minus the convertImage() step, works; otherwise the image gets put on S3 corrupted.
Any pointers as to what the issue behind the image being corrupted is?
Is my understanding of streams here lacking perhaps? If so, what should I change?
Thank you!
EDIT 1:
I tried not running the image through the graphicsmagick API at all (request.get(url).pipe(res);) and the image is still corrupted.
EDIT 2:
I gave up at the end and just uploaded the file from Node.js straight to S3; it turned out to be better practice anyway.
So if your end goal is to upload the image to the S3 bucket using Node.js, there is a simpler way: use the multer-s3 node module.
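A minimal sketch of that approach, assuming Express with multer and multer-s3 on top of aws-sdk v2 (the bucket name and field name are placeholders):

const express = require('express');
const multer = require('multer');
const multerS3 = require('multer-s3');
const AWS = require('aws-sdk');

const app = express();
const s3 = new AWS.S3(); // credentials assumed to come from env vars or IAM

const upload = multer({
  storage: multerS3({
    s3: s3,
    bucket: 'your-bucket-name',               // placeholder
    contentType: multerS3.AUTO_CONTENT_TYPE,  // detect Content-Type from the file
    key: (req, file, cb) => {
      cb(null, Date.now().toString() + '-' + file.originalname);
    },
  }),
});

// 'image' must match the field name used on the client
app.post('/upload', upload.single('image'), (req, res) => {
  res.json({ url: req.file.location });       // S3 URL of the uploaded object
});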

How to get file properties and upload a file from ionic 4?

I am trying to upload a file from mobile to a Google bucket using Ionic 4. Although the file can be uploaded to the cloud, I am struggling to get the file properties out of the file object.
Here is my method,
async selectAFile() {
  const uploadFileDetails = {
    name: '',
    contentLength: '',
    size: '',
    type: '',
    path: '',
  };
  this.fileChooser.open().then(uri => {
    this.file.resolveLocalFilesystemUrl(uri).then(newUrl => {
      let dirPath = newUrl.nativeURL;
      const dirPathSegments = dirPath.split('/');
      dirPathSegments.pop();
      dirPath = dirPathSegments.join('/');
      (<any>window).resolveLocalFileSystemURL(
        newUrl.nativeURL,
        function(fileEntry) {
          uploadFileDetails.path = newUrl.nativeURL;
          const file: any = getFileFromFileEntry(fileEntry);
          // log 01
          console.log({ file });
          uploadFileDetails.size = file.size;
          uploadFileDetails.name = `${newUrl.name
            .split(':')
            .pop()}.${file.type.split('/').pop()}`;
          uploadFileDetails.type = file.type;
          async function getFileFromFileEntry(fileEntry) {
            try {
              return await new Promise((resolve, reject) =>
                fileEntry.file(resolve, reject)
              );
            } catch (err) {
              console.log(err);
            }
          }
        },
        function(e) {
          console.error(e);
        }
      );
    });
  });
  // here uploadFileDetails is similar to what I declared at the top ;)
  // I want this to be populated with the file properties
  // console.log(uploadFileDetails.name) --> //''
  const uploadUrl = await this.getUploadUrl(uploadFileDetails);
  const response: any = this.uploadFile(
    uploadFileDetails,
    uploadUrl
  );
  response
    .then(function(success) {
      console.log({ success });
      this.presentToast('File uploaded successfully.');
      this.loadFiles();
    })
    .catch(function(error) {
      console.log({ error });
    });
}
Even though I can console.log the file at log 01, I am unable to get file properties like size, name, and type out of the resolveLocalFileSystemURL function; basically, I am unable to populate the uploadFileDetails object. What am I doing wrong? Thank you in advance.
You actually need four Ionic Cordova plugins to upload a file after getting all of its metadata:
FileChooser
Opens the file picker on Android for the user to select a file, returns a file URI.
FilePath
This plugin allows you to resolve the native filesystem path for Android content URIs and is based on code in the aFileChooser library.
File
This plugin implements a File API allowing read/write access to files residing on the device.
File Transfer
This plugin allows you to upload and download files.
Getting the file's metadata:
file.resolveLocalFilesystemUrl with fileEntry.file gives you all the metadata you need, except the file name. There is a property called name in the metadata, but it always contains the value content.
To get the human-readable file name you need FilePath. But remember that you can't use the path it returns to retrieve metadata; for that, you need the original URI from fileChooser.
filePathUrl.substring(filePathUrl.lastIndexOf('/') + 1) is used to get just the file name from the FilePath result.
You need the file's nativeURL in order to upload it. Using the path returned by FilePath is not going to work.
getFileInfo(): Promise<any> {
  return this.fileChooser.open().then(fileURI => {
    return this.filePath.resolveNativePath(fileURI).then(filePathUrl => {
      return this.file
        .resolveLocalFilesystemUrl(fileURI)
        .then((fileEntry: any) => {
          return new Promise((resolve, reject) => {
            fileEntry.file(
              meta =>
                resolve({
                  nativeURL: fileEntry.nativeURL,
                  fileNameFromPath: filePathUrl.substring(filePathUrl.lastIndexOf('/') + 1),
                  ...meta,
                }),
              error => reject(error)
            );
          });
        });
    });
  });
}
Select a file from the mobile's file system:
async selectAFile() {
  this.getFileInfo()
    .then(async fileMeta => {
      // get the upload URL
      const uploadUrl = await this.getUploadUrl(fileMeta);
      const response: Promise<any> = this.uploadFile(
        fileMeta,
        uploadUrl
      );
      response
        .then(function(success) {
          // upload success message
        })
        .catch(function(error) {
          // upload error message
        });
    })
    .catch(error => {
      // something went wrong getting the file information
    });
}
Uploading the selected file:
This depends on your back-end implementation, but this is how to use File Transfer to upload a file.
uploadFile(fileMeta, uploadUrl) {
  const options: FileUploadOptions = {
    fileKey: 'file',
    fileName: fileMeta.fileNameFromPath,
    headers: {
      'Content-Length': fileMeta.size,
      'Content-Type': fileMeta.type,
    },
    httpMethod: 'PUT',
    mimeType: fileMeta.type,
  };
  const fileTransfer: FileTransferObject = this.transfer.create();
  // upload using the file's nativeURL (the path returned by FilePath will not work)
  return fileTransfer.upload(fileMeta.nativeURL, uploadUrl, options);
}
hope it helps. :)

AWS S3 Upload after GET Request to Image, Not Uploading Correctly

I'm trying to upload an image to my AWS S3 bucket after downloading the image from another URL using Node (using request-promise-native & aws-sdk):
'use strict';
const config = require('../../../configs');
const AWS = require('aws-sdk');
const request = require('request-promise-native');

AWS.config.update(config.aws);
let s3 = new AWS.S3();

function uploadFile(req, res) {
  function getContentTypeByFile(fileName) {
    var rc = 'application/octet-stream';
    var fn = fileName.toLowerCase();
    if (fn.indexOf('.png') >= 0) rc = 'image/png';
    else if (fn.indexOf('.jpg') >= 0) rc = 'image/jpg';
    return rc;
  }

  let body = req.body,
    params = {
      "ACL": "bucket-owner-full-control",
      "Bucket": 'testing-bucket',
      "Content-Type": null,
      "Key": null, // Name of the file
      "Body": null // File body
    };

  // Grabs the filename from a URL
  params.Key = body.url.substring(body.url.lastIndexOf('/') + 1);
  // Setting the content type
  params.ContentType = getContentTypeByFile(params.Key);

  request.get(body.url)
    .then(response => {
      params.Body = response;
      s3.putObject(params, (err, data) => {
        if (err) { console.log(`Error uploading to S3 - ${err}`); }
        if (data) { console.log("Success - Uploaded to S3: " + data.toString()); }
      });
    })
    .catch(err => { console.log(`Error encountered: ${err}`); });
}
The upload succeeds when I test it out; however, after trying to re-download the file from my bucket, the image will not display. Additionally, I notice that after uploading the file with my function, the file listed in the bucket has a much larger file size than the originally uploaded image. I'm trying to figure out where I've gone wrong but cannot find it. Any help is appreciated.
Try opening the faulty file with a text editor; you will see some errors written in it.
You can try using s3.upload instead of putObject; it works better with streams.
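One likely culprit is that request-promise-native decodes the response body as a UTF-8 string by default, which mangles binary data; passing encoding: null returns a Buffer instead. A rough sketch combining that with s3.upload (the function name, bucket, and key are placeholders):

const AWS = require('aws-sdk');
const request = require('request-promise-native');

const s3 = new AWS.S3();

async function copyImageToS3(imageUrl, bucket, key) {
  // encoding: null makes request return the raw bytes as a Buffer
  // instead of a UTF-8 decoded string, which would corrupt the image.
  const body = await request.get({ uri: imageUrl, encoding: null });

  return s3.upload({
    Bucket: bucket,
    Key: key,
    Body: body,
    ContentType: 'image/jpeg', // adjust per file
  }).promise();
}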
