ECONNRESET socket hang up - JavaScript

I have a function that triggers on a Firebase database onWrite event. The function body uses two Google Cloud APIs (DNS and Storage).
While the function runs and mostly works as expected, the issue is that the socket hangs up more often than I'd like (about 50% of the time).
My questions are:
Have other testers experienced something similar? Is this a known outstanding issue, or is it expected behavior?
The example code is as follows:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const {credentials} = functions.config().auth;
credentials.private_key = credentials.private_key.replace(/\\n/g, '\n');
const config = Object.assign({}, functions.config().firebase, {credentials});
admin.initializeApp(config);
const gcs = require('@google-cloud/storage')({credentials});
const dns = require('@google-cloud/dns')({credentials});
const zoneName = 'applambda';
const zone = dns.zone(zoneName);

exports.createDeleteDNSAndStorage = functions.database.ref('/apps/{uid}/{appid}/name')
  .onWrite(event => {
    // Only edit data when it is first created.
    const {uid, appid} = event.params;
    const name = event.data.val();
    const dbRef = admin.database().ref(`/apps/${uid}/${appid}`);
    if (event.data.previous.exists()) {
      console.log(`already exists ${uid}/${appid}`);
      return;
    }
    // Exit when the data is deleted.
    if (!event.data.exists()) {
      console.log(`data is being deleted ${uid}/${appid}`);
      return;
    }
    const url = `${name}.${zoneName}.com`;
    console.log(`data: ${uid}/${appid}/${name}\nsetting up: ${url}`);
    setupDNS({url, dbRef});
    setupStorage({url, dbRef});
    return;
  });
function setupDNS({url, dbRef}) {
  // Create a CNAME record pointing at Cloud Storage.
  let cnameRecord = zone.record('cname', {
    name: `${url}.`,
    data: 'c.storage.googleapis.com.',
    ttl: 3000
  });
  zone.addRecords(cnameRecord).then(function() {
    console.log(`done setting up zonerecord for ${url}`);
    dbRef.update({dns: url}).then(res => console.log(res)).catch(err => console.log(err));
  }).catch(function(err) {
    console.error(`error setting up zonerecord for ${url}`);
    console.error(err);
  });
}

function setupStorage({url, dbRef}) {
  console.log(`setting up storage bucket for ${url}`);
  gcs.createBucket(url, {
    website: {
      mainPageSuffix: `https://${url}`,
      notFoundPage: `https://${url}/404.html`
    }
  }).then(function(res) {
    let bucket = res[0];
    console.log(`created bucket ${url}, setting it as public`);
    dbRef.update({storage: url}).then(function() {
      console.log(`done setting up bucket for ${url}`);
    }).catch(function(err) {
      console.error(`db update for storage failed ${url}`);
      console.error(err);
    });
    bucket.makePublic().then(function() {
      console.log(`bucket set as public for ${url}`);
    }).catch(function(err) {
      console.error(`setting public for storage failed ${url}`);
      console.error(err);
    });
  }).catch(function(err) {
    console.error(`creating bucket failed ${url}`);
    console.error(err);
  });
}

I'm thinking your function needs to return a promise so that all the other async work has time to complete before the function shuts down. As shown now, your function simply returns immediately without waiting for the work to complete.
I don't know the Cloud APIs you're using very well, but I'd guess that you should make setupDNS() and setupStorage() return the promises from the async work they're doing, then return Promise.all() with those two promises so Cloud Functions knows to wait until all that work is complete before cleaning up the container that's running the function.
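A minimal sketch of that change, reusing the question's own calls (untested, and assuming the @google-cloud clients return the promises shown above):

// Return the promise chains so the caller can wait on them.
function setupDNS({url, dbRef}) {
  const cnameRecord = zone.record('cname', {
    name: `${url}.`,
    data: 'c.storage.googleapis.com.',
    ttl: 3000
  });
  return zone.addRecords(cnameRecord)
    .then(() => dbRef.update({dns: url}));
}

function setupStorage({url, dbRef}) {
  return gcs.createBucket(url, {
    website: {
      mainPageSuffix: `https://${url}`,
      notFoundPage: `https://${url}/404.html`
    }
  }).then(([bucket]) => Promise.all([
    bucket.makePublic(),
    dbRef.update({storage: url})
  ]));
}

// ...and inside onWrite, wait for both before the function ends:
return Promise.all([
  setupDNS({url, dbRef}),
  setupStorage({url, dbRef})
]);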

Related

How do I use JavaScript to call the AWS Textract service to upload a local photo for identification (without S3)

I want to call the AWS Textract service from JavaScript to identify the numbers in a local photo (without S3), and I get an error:
TypeError: Cannot read property 'byteLength' of undefined, thrown from client.send(command).
I tried to find the correct sample in the AWS SDK for JavaScript v3 official documentation but couldn't find it.
I want to know how to modify the code to call this service.
This is my code:
const {
  TextractClient,
  AnalyzeDocumentCommand
} = require("@aws-sdk/client-textract");
// Set the AWS region
const REGION = "us-east-2"; // The AWS Region. For example, "us-east-1".
var fs = require("fs");
var res;
var imagedata = fs.readFileSync('./1.png');
res = imagedata.toString('base64');
console.log("res2");
console.log(typeof(res));
// console.log(res)
const client = new TextractClient({ region: REGION });
const params = {
  Document: {
    Bytes: res
  }
};
console.log("params");
console.log(typeof(params));
// console.log(params)
const command = new AnalyzeDocumentCommand(params);
console.log("command");
console.log(typeof(command));
const run = async () => {
  // async/await.
  try {
    const data = await client.send(command);
    console.log(data);
    // process data.
  } catch (error) {
    console.log("Error");
    console.log(error);
    // error handling.
  } finally {
    // finally.
  }
};
run();

Firebase cloud function call client side script

I have a script in React.js that gets data (numbers) from an API and adds these numbers to numbers from a Firebase collection when the user opens the page, so the user can see the totals.
There are going to be many users in the app, and every user is going to get different numbers from the same script.
I was wondering whether it is possible, with Firebase Cloud Functions, to run this client-side script on the server, do the calculations of these numbers on the server, and store the results in a Firestore collection.
I'm a beginner in Node.js and Cloud Functions, and I don't know if this is possible to do.
Get the numbers from the API:
getLatestNum = (sym) => {
  return API.getMarketBatch(sym).then((data) => {
    return data;
  });
};
The Cloud Function I was trying:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const db = admin.firestore();

exports.resetAppointmentTimes = functions.pubsub
  .schedule('30 20 * * *')
  .onRun((context) => {
    const appointmentTimesCollectionRef = db.collection('data');
    return appointmentTimesCollectionRef
      .get()
      .then((querySnapshot) => {
        if (querySnapshot.empty) {
          return null;
        } else {
          let batch = db.batch();
          querySnapshot.forEach((doc) => {
            console.log(doc);
          });
          return batch.commit();
        }
      })
      .catch((error) => {
        console.log(error);
        return null;
      });
  });
It is indeed possible to call a REST API from a Cloud Function. You need to use a Node.js library which returns Promises, like axios.
It's not 100% clear in your question which specific Firestore doc(s) you want to write to, but I'll assume the writing is done within the batched write.
So, something along the following lines should do the trick:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const axios = require('axios');
admin.initializeApp();
const db = admin.firestore();

exports.resetAppointmentTimes = functions.pubsub
  .schedule('30 20 * * *')
  .onRun((context) => {
    let apiData;
    return axios.get('https://yourapiuri...')
      .then(response => {
        apiData = response.data; // For example; it depends on what the API returns
        const appointmentTimesCollectionRef = db.collection('data');
        return appointmentTimesCollectionRef.get();
      })
      .then((querySnapshot) => {
        if (querySnapshot.empty) {
          return null;
        } else {
          let batch = db.batch();
          querySnapshot.forEach((doc) => {
            batch.update(doc.ref, { fieldApiData: apiData });
          });
          return batch.commit();
        }
      })
      .catch((error) => {
        console.log(error);
        return null;
      });
  });
Two things to note:
If you want to add the API result to some fields' existing values, you need to give more details on your exact need.
Important: You need to be on the "Blaze" pricing plan. As a matter of fact, the free "Spark" plan "allows outbound network requests only to Google-owned services". See https://firebase.google.com/pricing/ (hover your mouse over the question mark after the "Cloud Functions" title).

Making a distinction between file not present and access denied while accessing an S3 object via JavaScript

I have inherited the following code. It is part of a CI/CD pipeline. It tries to get an object called "changes" from a bucket and does something with it. If it is able to grab the object, it sends a success message back to the pipeline. If it fails to grab the file for whatever reason, it sends a failure message back to CodePipeline.
This "changes" file is made in a previous step of the pipeline. However, sometimes it is valid for this file NOT to exist (i.e., when there IS no change).
Currently, the code makes no distinction between the file simply not existing and the code failing to get it for some other reason (access denied, etc.).
Desired:
I would like to send a success message back to CodePipeline if the file is simply not there.
If there is an access issue, then the current outcome of "failure" would still be valid.
Any help is greatly appreciated. Unfortunately I am not good enough with JavaScript to have any ideas to try.
RELEVANT PARTS OF THE CODE
const AWS = require("aws-sdk");
const s3 = new AWS.S3();
const lambda = new AWS.Lambda();
const codePipeline = new AWS.CodePipeline();

// GET THESE FROM ENV Variables
const {
  API_SOURCE_S3_BUCKET: s3Bucket,
  ENV: env
} = process.env;

const jobSuccess = (CodePipeline, params) => {
  return new Promise((resolve, reject) => {
    CodePipeline.putJobSuccessResult(params, (err, data) => {
      if (err) { reject(err); }
      else { resolve(data); }
    });
  });
};

const jobFailure = (CodePipeline, params) => {
  return new Promise((resolve, reject) => {
    CodePipeline.putJobFailureResult(params, (err, data) => {
      if (err) { reject(err); }
      else { resolve(data); }
    });
  });
};

// MAIN CALLER FUNCTION. STARTING POINT
exports.handler = async (event, context, callback) => {
  try {
    // WHAT IS IN changes file in S3
    let changesFile = await getObject(s3, s3Bucket, `lambda/${version}/changes`);
    let changes = changesFile.trim().split("\n");
    console.log("List of Changes");
    console.log(changes);
    let params = { jobId };
    let jobSuccessResponse = await jobSuccess(codePipeline, params);
    context.succeed("Job Success");
  }
  catch (exception) {
    let message = "Job Failure (General)";
    let failureParams = {
      jobId,
      failureDetails: {
        message: JSON.stringify(message),
        type: "JobFailed",
        externalExecutionId: context.invokeid
      }
    };
    let jobFailureResponse = await jobFailure(codePipeline, failureParams);
    console.log(message, exception);
    context.fail(`${message}: ${exception}`);
  }
};
S3 should return an error code in the exception:
The ones you care about are below:
AccessDenied - Access Denied
NoSuchKey - The specified key does not exist.
So in your catch block you should be able to check exception.code to see whether it matches one of these two.
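A rough sketch of that check inside the existing handler (assuming the getObject helper from the question rethrows the AWS SDK error unchanged; note that S3 only reports NoSuchKey when the caller has list permission on the bucket, otherwise a missing key also surfaces as AccessDenied):

catch (exception) {
  // Missing "changes" file is a valid state: report success to CodePipeline.
  if (exception.code === 'NoSuchKey') {
    await jobSuccess(codePipeline, { jobId });
    context.succeed("Job Success (no changes file)");
    return;
  }
  // Anything else (AccessDenied, etc.) stays a failure, as before.
  let message = "Job Failure (General)";
  let failureParams = {
    jobId,
    failureDetails: {
      message: JSON.stringify(message),
      type: "JobFailed",
      externalExecutionId: context.invokeid
    }
  };
  await jobFailure(codePipeline, failureParams);
  console.log(message, exception);
  context.fail(`${message}: ${exception}`);
}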

Firebase cloud functions onCreate painfully slow to update database

I am having a slightly odd issue, and due to the lack of errors I am not exactly sure what I am doing wrong. What I am trying to do is, on an onCreate event, make an API call and then update a field in the database if the field is not set to null. Based on my Cloud Functions console logs, I can see the API call returning OK and everything working properly, but the update only happens after about 2-5 minutes. A few times it didn't update even after 15 minutes. What is causing such a slow update?
I have ruled out the gaxios call as the bottleneck, based on the function logs and local testing.
Some context: I am on the Firebase Blaze plan to allow for egress, and my dataset isn't really big. I am using gaxios because it is already installed as part of the firebase-functions npm package.
The code is:
const functions = require('firebase-functions');
const { request } = require('gaxios');
const { parse } = require('url');

exports.getGithubReadme = functions.firestore.document('readmes/{name}').onCreate((snapshot, context) => {
  const toolName = context.params.name;
  console.log(toolName);
  const { name, description, site } = snapshot.data();
  console.log(name, description, site);
  const parsedUrl = parse(site);
  console.log(parsedUrl);
  if (description) return;
  if (parsedUrl.hostname === 'github.com') {
    let githubUrl = `https://api.github.com/repos${parsedUrl.path}/readme`;
    request({
      method: 'GET',
      url: githubUrl
    })
      .then((res) => {
        let { content } = res.data;
        return snapshot.ref.update({ description: content });
      })
      .catch((error) => {
        console.log(error);
        return null;
      });
  }
  return null;
});
When you execute an asynchronous operation (i.e. request() in your case) in a background-triggered Cloud Function, you must return a promise, so that the Cloud Function waits for this promise to resolve before terminating.
This is very well explained in the official Firebase video series "Learning Cloud Functions for Firebase". In particular, watch the three videos titled "Learn JavaScript Promises" (Parts 2 and 3 especially focus on background-triggered Cloud Functions, but it is really worth watching Part 1 first).
So you should adapt your code as follows, returning the promise returned by request():
const functions = require('firebase-functions');
const { request } = require('gaxios');
const { parse } = require('url');

exports.getGithubReadme = functions.firestore.document('readmes/{name}').onCreate((snapshot, context) => {
  const toolName = context.params.name;
  console.log(toolName);
  const { name, description, site } = snapshot.data();
  console.log(name, description, site);
  const parsedUrl = parse(site);
  console.log(parsedUrl);
  if (description) return null;
  if (parsedUrl.hostname === 'github.com') {
    let githubUrl = `https://api.github.com/repos${parsedUrl.path}/readme`;
    return request({
      method: 'GET',
      url: githubUrl
    })
      .then((res) => {
        let { content } = res.data;
        return snapshot.ref.update({ description: content });
      })
      .catch((error) => {
        console.log(error);
        return null;
      });
  } else {
    return null;
  }
});

Google cloud function bigquery json insert TypeError: job.promise is not a function

I'm replicating this Google-authored tutorial and have run into an error that I can't figure out how to resolve.
In the Google Cloud Function that imports JSON into BigQuery, I get the error "TypeError: job.promise is not a function".
It comes from a line towards the bottom of the function; the code in question is:
.then(([job]) => job.promise())
The error led me to this discussion about the API used, but I don't understand how to resolve the error.
I tried .then(([ job ]) => waitJobFinish(job)), and removing the line resolves the error but doesn't insert anything.
Tertiary question: I also can't find documentation on how to trigger a test of the function so that I can read my console.logs in the Google Cloud Functions console, which would help figure this out. I can test the JSON POST part of this function, but I can't find what JSON triggers a test of a new file written to Cloud Storage - the test says it must include a bucket, but I don't know how to format the JSON (the JSON I use to test the POST -> store to Cloud Storage doesn't work).
Here is the full function, which I've pulled out on its own:
(function () {
  'use strict';
  // Get a reference to the Cloud Storage component
  const storage = require('@google-cloud/storage')();
  // Get a reference to the BigQuery component
  const bigquery = require('@google-cloud/bigquery')();

  function getTable () {
    const dataset = bigquery.dataset("iterableToBigquery");
    return dataset.get({ autoCreate: true })
      .then(([dataset]) => dataset.table("iterableToBigquery").get({ autoCreate: true }));
  }

  // set trigger for new files to google storage bucket
  exports.iterableToBigquery = (event) => {
    const file = event.data;
    if (file.resourceState === 'not_exists') {
      // This was a deletion event, we don't want to process this
      return;
    }
    return Promise.resolve()
      .then(() => {
        if (!file.bucket) {
          throw new Error('Bucket not provided. Make sure you have a "bucket" property in your request');
        } else if (!file.name) {
          throw new Error('Filename not provided. Make sure you have a "name" property in your request');
        }
        return getTable();
      })
      .then(([table]) => {
        const fileObj = storage.bucket(file.bucket).file(file.name);
        console.log(`Starting job for ${file.name}`);
        const metadata = {
          autodetect: true,
          sourceFormat: 'NEWLINE_DELIMITED_JSON'
        };
        return table.import(fileObj, metadata);
      })
      .then(([job]) => job.promise())
      //.then(([ job ]) => waitJobFinish(job))
      .then(() => console.log(`Job complete for ${file.name}`))
      .catch((err) => {
        console.log(`Job failed for ${file.name}`);
        return Promise.reject(err);
      });
  };
}());
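Regarding the tertiary question: this function only reads event.data.bucket, event.data.name and event.data.resourceState, so a minimal test payload for the Cloud Functions console's test tab might look like the sketch below (the bucket and object names are placeholders, not from the original):

{
  "bucket": "my-example-bucket",
  "name": "exports/new-file.json",
  "resourceState": "exists"
}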
So I couldn't figure out how to fix Google's example, but I was able to get this load to work from JS with the following code in a Google Cloud Function:
'use strict';
/*jshint esversion: 6 */
// Get a reference to the Cloud Storage component
const storage = require('@google-cloud/storage')();
// Get a reference to the BigQuery component
const bigquery = require('@google-cloud/bigquery')();

exports.iterableToBigquery = (event) => {
  const file = event.data;
  if (file.resourceState === 'not_exists') {
    // This was a deletion event, we don't want to process this
    return;
  }
  const importmetadata = {
    autodetect: false,
    sourceFormat: 'NEWLINE_DELIMITED_JSON'
  };
  let job;
  // Loads data from a Google Cloud Storage file into the table
  bigquery
    .dataset("analytics")
    .table("iterable")
    .import(storage.bucket(file.bucket).file(file.name), importmetadata)
    .then(results => {
      job = results[0];
      console.log(`Job ${job.id} started.`);
      // Wait for the job to finish
      return job;
    })
    .then(metadata => {
      // Check the job's status for errors
      const errors = metadata.status.errors;
      if (errors && errors.length > 0) {
        throw errors;
      }
    })
    .then(() => {
      console.log(`Job ${job.id} completed.`);
    })
    .catch(err => {
      console.error('ERROR:', err);
    });
};
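One caveat, in line with the other answers on this page: as written, this function does not return the promise chain, so Cloud Functions may tear the instance down before the load finishes. A minimal tweak (a sketch, not part of the original answer) is to return the chain:

  // Return the chain so the background function waits for the BigQuery load.
  return bigquery
    .dataset("analytics")
    .table("iterable")
    .import(storage.bucket(file.bucket).file(file.name), importmetadata)
    // ... the same .then()/.catch() handlers as above ...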
