How to define error messages in Sails.js - Node.js

It's my first time using Sails and it looks good, but I've run into a problem: is it possible to define a custom error message in Sails model validation? The error messages being returned are too technical and not user-friendly.
Thanks
Update: https://gist.github.com/mikermcneil/8366092

Here's another alternative:
/**
 * Takes a Sails model object (e.g. User) and a ValidationError object and translates it into a friendly
 * object for sending via JSON to client-side frameworks.
 *
 * To use, add a new object on your model describing which validation errors should be translated:
 *
 *   module.exports = {
 *     attributes: {
 *       name: {
 *         type: 'string',
 *         required: true
 *       }
 *     },
 *
 *     validation_messages: {
 *       name: {
 *         required: 'you have to specify a name or else'
 *       }
 *     }
 *   };
 *
 * Then in your controller you could write something like this:
 *
 *   var validator = require('sails-validator-tool');
 *
 *   Mymodel.create(options).done(function(error, mymodel) {
 *     if (error) {
 *       if (error.ValidationError) {
 *         error_object = validator(Mymodel, error.ValidationError);
 *         res.send({result: false, errors: error_object});
 *       }
 *     }
 *   });
 *
 * @param model {Object} An instance of a Sails.js model object.
 * @param validationErrors {Object} A standard Sails.js validation object.
 *
 * @returns {Object} An object with friendly validation error conversions.
 */
module.exports = function(model, validationError) {
  var validation_response = {};
  var messages = model.validation_messages;
  var validation_fields = Object.keys(messages);
  validation_fields.forEach(function(validation_field) {
    if (validationError[validation_field]) {
      var processField = validationError[validation_field];
      processField.forEach(function(rule) {
        if (messages[validation_field][rule.rule]) {
          if (!(validation_response[validation_field] instanceof Array)) {
            validation_response[validation_field] = [];
          }
          var newMessage = {};
          newMessage[rule.rule] = messages[validation_field][rule.rule];
          validation_response[validation_field].push(newMessage);
        }
      });
    }
  });
  return validation_response;
};
Credits to: sfb_
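For reference, a sketch of the shapes involved, inferred from the helper above (the exact ValidationError layout varies across Waterline versions, so treat this as an assumption):

// Hypothetical Waterline-style ValidationError, as the helper expects it:
var validationError = {
  name: [
    { rule: 'required', message: '"required" validation rule failed for input: null' }
  ]
};

// What the helper returns for the model shown in the comment block:
// { name: [ { required: 'you have to specify a name or else' } ] }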

Here is an ugly fix:
Make a folder named my-validation-utils, create an index.js file there, and put the following content in it:
var user = {
  email: {
    required: 'Email Required',
    email: 'Should be an email'
  },
  name: {
    required: 'name required'
  }
};

var product = {
  name: {
    required: 'Product name is required'
  }
};

var validationMessages = {
  user: user,
  product: product
};

/**
 * This function expects the name of the model and error.ValidationError,
 * and puts the user-defined messages in error.ValidationError.
 */
module.exports = function(model, validationError) {
  var messages = validationMessages[model];
  for (var key in messages) {
    var element = messages[key];
    if (validationError[key]) {
      for (var i in validationError[key]) {
        var err = validationError[key][i];
        err.message = element[err.rule];
      }
    }
  }
  return validationError;
};
Now in your controller do something like this:
User.create(user).done(function (error, user) {
  if (error) {
    if (error.ValidationError) {
      var validator = require('path/to/my-validation-utils');
      var errors = validator('user', error.ValidationError); // puts the messages for model user
      // now errors contains the validationErrors with user-defined messages
    }
  } else {
    // user is saved
  }
});

Related

Node ExcelJS writing file and saving to S3 heap out of memory issue

I have a download center module where I generate S3 links for multiple Excel files at the same time, based on the filters passed from the frontend, using MongoDB aggregations. It all works fine, however it takes a lot of memory and sometimes report generation crashes with a JavaScript heap out of memory error. I need to know how I can resolve this heap out of memory error. I am using the latest version of exceljs (4.3.0) and LoopbackJS 3 (deprecated) as the Node framework. Here are some code snippets which might help you figure out the issue.
const excelReport = new ExcelReport(report.name);
excelReport.setHeaders(headers);
const aggregation = executeEventBaseAggregation(
  "RiderDeliveryBatch",
  pipeline
);
aggregation.event.on("data", function (x) {
  let rows = {};
  rows["Date Created At"] = utils.localDateTime(x.parcel.createdAt);
  rows["Pickup Date"] = utils.localDateTime(x.parcelStatuses.find(
    (y) =>
      y.statusRepositoryId.toString() ===
      statuses.find((val) => val.key === "parcel-picked-up").id.toString()
  ).createdAt);
  rows["CN"] = x.parcel._id;
  rows["Vendor Order ID"] = x.parcel.vendorParcelId;
  rows["Vendor Name"] = x.vendor.name;
  rows["Consignee First Name"] = x.customerData.firstName;
  rows["Consignee Last Name"] = x.customerData.lastName;
  rows["Address"] = x.customerData.address;
  rows["Origin City"] = getCities(x.parcel.originCityId);
  rows["Destination City"] = cities.find(
    (y) => y.id.toString() === x.customerData.cityId.toString()
  ).name;
  rows["Destination subCity"] = x.customerData["subCity"] ? x.customerData["subCity"] : "";
  rows["COD"] = x.parcel.amount;
  rows["Attempts"] = confirmDispatched.length;
  excelReport.addRow(rows);
});
aggregation.event.on("error", function (e) {
  console.log("aggregation error");
  updateDownloadCenterReport(downloadCenterReport, {
    status: "error",
    e,
  });
});
aggregation.event.on("done", function () {
  console.log("aggregation done");
  excelReport
    .upload()
    .then(async (link) => {
      await updateDownloadCenterReport(downloadCenterReport, {
        excelLink: link,
        status: "success",
        pdfLink: "",
      });
      longRunningTaskEnd();
    })
    .catch(async (error) => {
      await updateDownloadCenterReport(downloadCenterReport, {
        status: "error",
        error,
      });
      longRunningTaskEnd();
    });
});
ExcelJS util module
class ExcelReport {
  /**
   * Initializes excel sheet with provided worksheet name
   * @param {string} worksheetName
   */
  constructor(worksheetName) {
    this.workbook = new Excel.Workbook();
    this.worksheet = this.workbook.addWorksheet(worksheetName);
  }

  /**
   * @param {{header: string; key: string; width: number}[]} headers
   */
  setHeaders(headers) {
    this.worksheet.columns = headers;
    return this;
  }

  /**
   * @param {{}} row
   */
  addRow(row) {
    this.worksheet.addRow(row);
    return this;
  }

  /**
   * @param {{}[]} rows
   */
  addRows(rows) {
    this.worksheet.addRows(rows);
    return this;
  }

  /**
   * Uploads the file to AWS.
   * @returns {Promise<string>}
   */
  async upload() {
    const aws = new FileUploadService(
      awsConfig.accessKeyId,
      awsConfig.secretAccessKey,
      awsConfig.bucketName
    );
    const bufferFile = await this.workbook.xlsx.writeBuffer();
    const link = await aws.uploadBuffer(
      "download-center",
      bufferFile,
      "xlsx",
      this.worksheet.name
    );
    return link;
  }
}
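Since ExcelReport builds the whole workbook in memory and only serializes it at the end with writeBuffer(), one commonly suggested mitigation is ExcelJS's streaming writer, which flushes each committed row to an output stream instead of holding the sheet in memory. Below is a minimal sketch of that approach, assuming the AWS SDK v2 (aws-sdk) is available and that the credentials/bucket come from the same awsConfig as above; the async-iterable rowSource is a placeholder you would adapt from the aggregation events:

const Excel = require("exceljs");
const AWS = require("aws-sdk");
const { PassThrough } = require("stream");

async function uploadStreamedReport(worksheetName, headers, rowSource) {
  const s3 = new AWS.S3({
    accessKeyId: awsConfig.accessKeyId,
    secretAccessKey: awsConfig.secretAccessKey,
  });
  const pass = new PassThrough();

  // S3 consumes the stream while ExcelJS writes to it.
  const uploadPromise = s3
    .upload({
      Bucket: awsConfig.bucketName,
      Key: "download-center/" + worksheetName + ".xlsx",
      Body: pass,
    })
    .promise();

  // Streaming workbook writer: committed rows are flushed to the stream
  // instead of being kept in memory.
  const workbook = new Excel.stream.xlsx.WorkbookWriter({ stream: pass });
  const worksheet = workbook.addWorksheet(worksheetName);
  worksheet.columns = headers;

  for await (const row of rowSource) {
    worksheet.addRow(row).commit(); // frees the row from memory
  }
  worksheet.commit();
  await workbook.commit(); // ends the stream

  return (await uploadPromise).Location;
}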

Delete a Document with all Subcollections and Nested Subcollections in Firestore

How can you delete a document with all its collections and nested subcollections? (inside the functions environment)
In the RTDB you can ref.child('../someNode').setValue(null) and that achieves the desired behavior.
I can think of two ways you could achieve the desired delete behavior, both with tremendously ghastly drawbacks.
Create a 'Super' function that will spider every document and delete them in a batch.
This function would be complicated, brittle to changes, and might take a lengthy execution time.
Add 'onDelete' triggers for each Document type, and make it delete any direct subcollections. You'll call delete on the root document, and the deletion calls will propagate down the 'tree'. This is sluggish, scales atrociously and is costly due to the colossal load of function executions.
Imagine you would have to delete a 'GROUP' and all its children. It would be deeply chaotic with #1 and pricey with #2 (1 function call per doc):
groups > GROUP > projects > PROJECT > files > FILE > assets > ASSET
                                                   > urls > URL
                                    > members > MEMBER
               > questions > QUESTION > answers > ANSWER > replies > REPLY
                                      > comments > COMMENT
               > resources > RESOURCE > submissions > SUBMISSION
                                      > requests > REQUEST
Is there a superior/favored/cleaner way to delete a document and all its nested subcollections?
It ought to be possible, considering you can do it from the console.
According to the Firebase documentation:
https://firebase.google.com/docs/firestore/solutions/delete-collections
deleting a collection with nested subcollections can be done easily and neatly with Node.js on the server side.
const client = require('firebase-tools');
await client.firestore.delete(collectionPath, {
  project: process.env.GCLOUD_PROJECT,
  recursive: true,
  yes: true
});
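The same docs page wraps that call in a callable Cloud Function so clients can request a recursive delete. A minimal sketch along those lines - the "admin" custom claim, the config slot for the CI token, and the function options are assumptions you would adapt:

const functions = require("firebase-functions");
const firebase_tools = require("firebase-tools");

exports.recursiveDelete = functions
  .runWith({ timeoutSeconds: 540, memory: "2GB" })
  .https.onCall(async (data, context) => {
    // Only allow privileged users to trigger deletes (assumed "admin" claim).
    if (!(context.auth && context.auth.token && context.auth.token.admin)) {
      throw new functions.https.HttpsError(
        "permission-denied",
        "Must be an administrative user to initiate delete."
      );
    }
    // data.path is the document or collection path to delete recursively.
    await firebase_tools.firestore.delete(data.path, {
      project: process.env.GCLOUD_PROJECT,
      recursive: true,
      yes: true,
      token: functions.config().fb.token, // assumed config slot for a CI token
    });
    return { path: data.path };
  });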
Unfortunately, your analysis is spot on and indeed this use case does require a lot of ceremony. According to the official documentation, there is no support for deep deletes in a single shot in Firestore, neither via client libraries nor the REST API nor the CLI tool.
The CLI is open sourced and its implementation lives here: https://github.com/firebase/firebase-tools/blob/master/src/firestore/delete.js. They basically implemented option 1 you described in your question, so you can take some inspiration from there.
Both options 1 and 2 are far from ideal, and to make your solution 100% reliable you will need to keep a persistent queue with deletion tasks, as any error in the long-running procedure will leave your system in some ill-defined state.
I would discourage going with raw option 2, as recursive cloud function calls can very easily go wrong - for example, by hitting max limits.
In case the link changes, below is the full source of https://github.com/firebase/firebase-tools/blob/master/src/firestore/delete.js:
"use strict";
var clc = require("cli-color");
var ProgressBar = require("progress");
var api = require("../api");
var firestore = require("../gcp/firestore");
var FirebaseError = require("../error");
var logger = require("../logger");
var utils = require("../utils");
/**
* Construct a new Firestore delete operation.
*
* #constructor
* #param {string} project the Firestore project ID.
* #param {string} path path to a document or collection.
* #param {boolean} options.recursive true if the delete should be recursive.
* #param {boolean} options.shallow true if the delete should be shallow (non-recursive).
* #param {boolean} options.allCollections true if the delete should universally remove all collections and docs.
*/
function FirestoreDelete(project, path, options) {
this.project = project;
this.path = path;
this.recursive = Boolean(options.recursive);
this.shallow = Boolean(options.shallow);
this.allCollections = Boolean(options.allCollections);
// Remove any leading or trailing slashes from the path
if (this.path) {
this.path = this.path.replace(/(^\/+|\/+$)/g, "");
}
this.isDocumentPath = this._isDocumentPath(this.path);
this.isCollectionPath = this._isCollectionPath(this.path);
this.allDescendants = this.recursive;
this.parent = "projects/" + project + "/databases/(default)/documents";
// When --all-collections is passed any other flags or arguments are ignored
if (!options.allCollections) {
this._validateOptions();
}
}
/**
* Validate all options, throwing an exception for any fatal errors.
*/
FirestoreDelete.prototype._validateOptions = function() {
if (this.recursive && this.shallow) {
throw new FirebaseError("Cannot pass recursive and shallow options together.");
}
if (this.isCollectionPath && !this.recursive && !this.shallow) {
throw new FirebaseError("Must pass recursive or shallow option when deleting a collection.");
}
var pieces = this.path.split("/");
if (pieces.length === 0) {
throw new FirebaseError("Path length must be greater than zero.");
}
var hasEmptySegment = pieces.some(function(piece) {
return piece.length === 0;
});
if (hasEmptySegment) {
throw new FirebaseError("Path must not have any empty segments.");
}
};
/**
* Determine if a path points to a document.
*
* #param {string} path a path to a Firestore document or collection.
* #return {boolean} true if the path points to a document, false
* if it points to a collection.
*/
FirestoreDelete.prototype._isDocumentPath = function(path) {
if (!path) {
return false;
}
var pieces = path.split("/");
return pieces.length % 2 === 0;
};
/**
* Determine if a path points to a collection.
*
* #param {string} path a path to a Firestore document or collection.
* #return {boolean} true if the path points to a collection, false
* if it points to a document.
*/
FirestoreDelete.prototype._isCollectionPath = function(path) {
if (!path) {
return false;
}
return !this._isDocumentPath(path);
};
/**
* Construct a StructuredQuery to find descendant documents of a collection.
*
* See:
* https://firebase.google.com/docs/firestore/reference/rest/v1beta1/StructuredQuery
*
* #param {boolean} allDescendants true if subcollections should be included.
* #param {number} batchSize maximum number of documents to target (limit).
* #param {string=} startAfter document name to start after (optional).
* #return {object} a StructuredQuery.
*/
FirestoreDelete.prototype._collectionDescendantsQuery = function(
allDescendants,
batchSize,
startAfter
) {
var nullChar = String.fromCharCode(0);
var startAt = this.parent + "/" + this.path + "/" + nullChar;
var endAt = this.parent + "/" + this.path + nullChar + "/" + nullChar;
var where = {
compositeFilter: {
op: "AND",
filters: [
{
fieldFilter: {
field: {
fieldPath: "__name__",
},
op: "GREATER_THAN_OR_EQUAL",
value: {
referenceValue: startAt,
},
},
},
{
fieldFilter: {
field: {
fieldPath: "__name__",
},
op: "LESS_THAN",
value: {
referenceValue: endAt,
},
},
},
],
},
};
var query = {
structuredQuery: {
where: where,
limit: batchSize,
from: [
{
allDescendants: allDescendants,
},
],
select: {
fields: [{ fieldPath: "__name__" }],
},
orderBy: [{ field: { fieldPath: "__name__" } }],
},
};
if (startAfter) {
query.structuredQuery.startAt = {
values: [{ referenceValue: startAfter }],
before: false,
};
}
return query;
};
/**
* Construct a StructuredQuery to find descendant documents of a document.
* The document itself will not be included
* among the results.
*
* See:
* https://firebase.google.com/docs/firestore/reference/rest/v1beta1/StructuredQuery
*
* #param {boolean} allDescendants true if subcollections should be included.
* #param {number} batchSize maximum number of documents to target (limit).
* #param {string=} startAfter document name to start after (optional).
* #return {object} a StructuredQuery.
*/
FirestoreDelete.prototype._docDescendantsQuery = function(allDescendants, batchSize, startAfter) {
var query = {
structuredQuery: {
limit: batchSize,
from: [
{
allDescendants: allDescendants,
},
],
select: {
fields: [{ fieldPath: "__name__" }],
},
orderBy: [{ field: { fieldPath: "__name__" } }],
},
};
if (startAfter) {
query.structuredQuery.startAt = {
values: [{ referenceValue: startAfter }],
before: false,
};
}
return query;
};
/**
* Query for a batch of 'descendants' of a given path.
*
* For document format see:
* https://firebase.google.com/docs/firestore/reference/rest/v1beta1/Document
*
* #param {boolean} allDescendants true if subcollections should be included,
* #param {number} batchSize the maximum size of the batch.
* #param {string=} startAfter the name of the document to start after (optional).
* #return {Promise<object[]>} a promise for an array of documents.
*/
FirestoreDelete.prototype._getDescendantBatch = function(allDescendants, batchSize, startAfter) {
var url;
var body;
if (this.isDocumentPath) {
url = this.parent + "/" + this.path + ":runQuery";
body = this._docDescendantsQuery(allDescendants, batchSize, startAfter);
} else {
url = this.parent + ":runQuery";
body = this._collectionDescendantsQuery(allDescendants, batchSize, startAfter);
}
return api
.request("POST", "/v1beta1/" + url, {
auth: true,
data: body,
origin: api.firestoreOrigin,
})
.then(function(res) {
// Return the 'document' property for each element in the response,
// where it exists.
return res.body
.filter(function(x) {
return x.document;
})
.map(function(x) {
return x.document;
});
});
};
/**
* Progress bar shared by the class.
*/
FirestoreDelete.progressBar = new ProgressBar("Deleted :current docs (:rate docs/s)", {
total: Number.MAX_SAFE_INTEGER,
});
/**
* Repeatedly query for descendants of a path and delete them in batches
* until no documents remain.
*
* #return {Promise} a promise for the entire operation.
*/
FirestoreDelete.prototype._recursiveBatchDelete = function() {
var self = this;
// Tunable deletion parameters
var readBatchSize = 7500;
var deleteBatchSize = 250;
var maxPendingDeletes = 15;
var maxQueueSize = deleteBatchSize * maxPendingDeletes * 2;
// All temporary variables for the deletion queue.
var queue = [];
var numPendingDeletes = 0;
var pagesRemaining = true;
var pageIncoming = false;
var lastDocName;
var failures = [];
var retried = {};
var queueLoop = function() {
if (queue.length == 0 && numPendingDeletes == 0 && !pagesRemaining) {
return true;
}
if (failures.length > 0) {
logger.debug("Found " + failures.length + " failed deletes, failing.");
return true;
}
if (queue.length <= maxQueueSize && pagesRemaining && !pageIncoming) {
pageIncoming = true;
self
._getDescendantBatch(self.allDescendants, readBatchSize, lastDocName)
.then(function(docs) {
pageIncoming = false;
if (docs.length == 0) {
pagesRemaining = false;
return;
}
queue = queue.concat(docs);
lastDocName = docs[docs.length - 1].name;
})
.catch(function(e) {
logger.debug("Failed to fetch page after " + lastDocName, e);
pageIncoming = false;
});
}
if (numPendingDeletes > maxPendingDeletes) {
return false;
}
if (queue.length == 0) {
return false;
}
var toDelete = [];
var numToDelete = Math.min(deleteBatchSize, queue.length);
for (var i = 0; i < numToDelete; i++) {
toDelete.push(queue.shift());
}
numPendingDeletes++;
firestore
.deleteDocuments(self.project, toDelete)
.then(function(numDeleted) {
FirestoreDelete.progressBar.tick(numDeleted);
numPendingDeletes--;
})
.catch(function(e) {
// For server errors, retry if the document has not yet been retried.
if (e.status >= 500 && e.status < 600) {
logger.debug("Server error deleting doc batch", e);
// Retry each doc up to one time
toDelete.forEach(function(doc) {
if (retried[doc.name]) {
logger.debug("Failed to delete doc " + doc.name + " multiple times.");
failures.push(doc.name);
} else {
retried[doc.name] = true;
queue.push(doc);
}
});
} else {
logger.debug("Fatal error deleting docs ", e);
failures = failures.concat(toDelete);
}
numPendingDeletes--;
});
return false;
};
return new Promise(function(resolve, reject) {
var intervalId = setInterval(function() {
if (queueLoop()) {
clearInterval(intervalId);
if (failures.length == 0) {
resolve();
} else {
reject("Failed to delete documents " + failures);
}
}
}, 0);
});
};
/**
* Delete everything under a given path. If the path represents
* a document the document is deleted and then all descendants
* are deleted.
*
* #return {Promise} a promise for the entire operation.
*/
FirestoreDelete.prototype._deletePath = function() {
var self = this;
var initialDelete;
if (this.isDocumentPath) {
var doc = { name: this.parent + "/" + this.path };
initialDelete = firestore.deleteDocument(doc).catch(function(err) {
logger.debug("deletePath:initialDelete:error", err);
if (self.allDescendants) {
// On a recursive delete, we are insensitive to
// failures of the initial delete
return Promise.resolve();
}
// For a shallow delete, this error is fatal.
return utils.reject("Unable to delete " + clc.cyan(this.path));
});
} else {
initialDelete = Promise.resolve();
}
return initialDelete.then(function() {
return self._recursiveBatchDelete();
});
};
/**
* Delete an entire database by finding and deleting each collection.
*
* #return {Promise} a promise for all of the operations combined.
*/
FirestoreDelete.prototype.deleteDatabase = function() {
var self = this;
return firestore
.listCollectionIds(this.project)
.catch(function(err) {
logger.debug("deleteDatabase:listCollectionIds:error", err);
return utils.reject("Unable to list collection IDs");
})
.then(function(collectionIds) {
var promises = [];
logger.info("Deleting the following collections: " + clc.cyan(collectionIds.join(", ")));
for (var i = 0; i < collectionIds.length; i++) {
var collectionId = collectionIds[i];
var deleteOp = new FirestoreDelete(self.project, collectionId, {
recursive: true,
});
promises.push(deleteOp.execute());
}
return Promise.all(promises);
});
};
/**
* Check if a path has any children. Useful for determining
* if deleting a path will affect more than one document.
*
* #return {Promise<boolean>} a promise that retruns true if the path has
* children and false otherwise.
*/
FirestoreDelete.prototype.checkHasChildren = function() {
return this._getDescendantBatch(true, 1).then(function(docs) {
return docs.length > 0;
});
};
/**
* Run the delete operation.
*/
FirestoreDelete.prototype.execute = function() {
var verifyRecurseSafe;
if (this.isDocumentPath && !this.recursive && !this.shallow) {
verifyRecurseSafe = this.checkHasChildren().then(function(multiple) {
if (multiple) {
return utils.reject("Document has children, must specify -r or --shallow.", { exit: 1 });
}
});
} else {
verifyRecurseSafe = Promise.resolve();
}
var self = this;
return verifyRecurseSafe.then(function() {
return self._deletePath();
});
};
module.exports = FirestoreDelete;
For those who don't want to or can't use cloud functions, I found a recursiveDelete function in the Admin SDK:
https://googleapis.dev/nodejs/firestore/latest/Firestore.html#recursiveDelete
// Recursively delete a reference and log the references of failures.
const MAX_RETRY_ATTEMPTS = 3; // not part of the docs snippet; pick a cap that suits you
const bulkWriter = firestore.bulkWriter();

bulkWriter.onWriteError((error) => {
  if (error.failedAttempts < MAX_RETRY_ATTEMPTS) {
    return true;
  } else {
    console.log('Failed write at document: ', error.documentRef.path);
    return false;
  }
});

await firestore.recursiveDelete(docRef, bulkWriter);
I don't know how helpful this is for you, but test it and compare the execution time. I took it from the Firestore docs:
/**
 * Delete a collection in batches to avoid out-of-memory errors.
 * Batch size may be tuned based on document size (at most 1 MB) and application requirements.
 */
void deleteCollection(CollectionReference collection, int batchSize) {
  try {
    // retrieve a small batch of documents to avoid out-of-memory errors
    ApiFuture<QuerySnapshot> future = collection.limit(batchSize).get();
    int deleted = 0;
    // future.get() blocks on document retrieval
    List<QueryDocumentSnapshot> documents = future.get().getDocuments();
    for (QueryDocumentSnapshot document : documents) {
      document.getReference().delete();
      ++deleted;
    }
    if (deleted >= batchSize) {
      // retrieve and delete another batch
      deleteCollection(collection, batchSize);
    }
  } catch (Exception e) {
    System.err.println("Error deleting collection : " + e.getMessage());
  }
}
As mentioned above, you need to write a good bit of code for this. For each document that is to be deleted you need to check if it has one or more collections. If it does, then you need to queue those up for deletion too. I wrote the code below to do this. It's not tested to be scalable to large data sets, which is fine for me as I'm using it to clean up after small-scale integration tests. If you need something more scalable, feel free to take this as a starting point and play around with batching more.
class FirebaseDeleter {
  constructor(database, collections) {
    this._database = database;
    this._pendingCollections = [];
  }

  run() {
    return new Promise((resolve, reject) => {
      this._callback = resolve;
      this._database.getCollections().then(collections => {
        this._pendingCollections = collections;
        this._processNext();
      });
    });
  }

  _processNext() {
    const collections = this._pendingCollections;
    this._pendingCollections = [];

    const promises = collections.map(collection => {
      return this.deleteCollection(collection, 10000);
    });

    Promise.all(promises).then(() => {
      if (this._pendingCollections.length == 0) {
        this._callback();
      } else {
        process.nextTick(() => {
          this._processNext();
        });
      }
    });
  }

  deleteCollection(collectionRef, batchSize) {
    var query = collectionRef;
    return new Promise((resolve, reject) => {
      this.deleteQueryBatch(query, batchSize, resolve, reject);
    });
  }

  deleteQueryBatch(query, batchSize, resolve, reject) {
    query
      .get()
      .then(snapshot => {
        // When there are no documents left, we are done
        if (snapshot.size == 0) {
          return 0;
        }

        // Delete documents in a batch
        var batch = this._database.batch();
        const collectionPromises = [];

        snapshot.docs.forEach(doc => {
          collectionPromises.push(
            doc.ref.getCollections().then(collections => {
              collections.forEach(collection => {
                this._pendingCollections.push(collection);
              });
            })
          );

          batch.delete(doc.ref);
        });

        // Wait until we know if all the documents have collections before deleting them.
        return Promise.all(collectionPromises).then(() => {
          return batch.commit().then(() => {
            return snapshot.size;
          });
        });
      })
      .then(numDeleted => {
        if (numDeleted === 0) {
          resolve();
          return;
        }

        // Recurse on the next process tick, to avoid
        // exploding the stack.
        process.nextTick(() => {
          this.deleteQueryBatch(query, batchSize, resolve, reject);
        });
      })
      .catch(reject);
  }
}
Solution using Node.js Admin SDK
export const deleteDocument = async (doc: FirebaseFirestore.DocumentReference) => {
  const collections = await doc.listCollections()
  await Promise.all(collections.map(collection => deleteCollection(collection)))
  await doc.delete()
}

export const deleteCollection = async (collection: FirebaseFirestore.CollectionReference) => {
  const query = collection.limit(100)
  while (true) {
    const snap = await query.get()
    if (snap.empty) {
      return
    }
    await Promise.all(snap.docs.map(doc => deleteDocument(doc.ref)))
  }
}
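A quick usage sketch of the helpers above (the document path is hypothetical):

// Recursively delete one group document and everything nested under it.
await deleteDocument(admin.firestore().doc('groups/my-group-id'))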
There is now a simple way to delete a document and all of its subcollections using Node.js.
This was made available in nodejs-firestore version v4.11.0.
From the docs:
recursiveDelete()
Recursively deletes all documents and subcollections at and under the specified level.
import * as admin from 'firebase-admin'
const ref = admin.firestore().doc('my_document')
admin.firestore().recursiveDelete(ref)
You can write a handler which will recursively delete all nested descendants when the Firestore onDelete event triggers.
Example of a handler:
const deleteDocumentWithDescendants = async (documentSnap: FirebaseFirestore.QueryDocumentSnapshot) => {
  return documentSnap.ref.listCollections().then((subCollections) => {
    subCollections.forEach((subCollection) => {
      return subCollection.get().then((snap) => {
        snap.forEach((doc) => {
          doc.ref.delete();
          deleteDocumentWithDescendants(doc);
        });
      });
    });
  });
};

// On any document delete
export const onDocumentDelete = async (documentSnap: FirebaseFirestore.QueryDocumentSnapshot) => {
  await deleteDocumentWithDescendants(documentSnap);
};
Tie it up with a Firestore event:
exports.onDeleteDocument = functions.firestore.document('{collectionId}/{docId}')
  .onDelete(onDocumentDelete);
// You can add all the collection hierarchy to an object
private collectionsHierarchy = {
  groups: [
    [
      'groups',
      'projects',
      'files',
      'assets',
      'urls',
      'members'
    ]
  ]
};

async deleteDocument(rootDocument: string) {
  // if (!rootDocument.startsWith(`groups/${this.groupId()}`)) {
  //   rootDocument = `groups/${this.groupId()}/${rootDocument}`;
  // }
  const batchSize: number = 100;
  let root = await this.db
    .doc(rootDocument)
    .get()
    .toPromise();
  if (!root.exists) {
    return;
  }

  const segments = rootDocument.split('/');
  const documentCollection = segments[segments.length - 2];
  const allHierarchies = this.collectionsHierarchy[documentCollection];

  for (let i = 0; i < allHierarchies.length; i = i + 1) {
    const hierarchy = allHierarchies[i];
    const collectionIndex = hierarchy.indexOf(documentCollection) + 1;
    const nextCollections: [] = hierarchy.slice(collectionIndex);
    const stack = [`${root.ref.path}/${nextCollections.shift()}`];
    while (stack.length) {
      const path = stack.pop();
      const collectionRef = this.db.firestore.collection(path);
      const query = collectionRef.orderBy('__name__').limit(batchSize);
      let deletedItems = await this.deleteQueryBatch(query, batchSize);
      const nextCollection = nextCollections.shift();
      deletedItems = deletedItems.map(di => `${di}/${nextCollection}`);
      stack.push(...deletedItems);
    }
  }
  await root.ref.delete();
}

private async deleteQueryBatch(
  query: firebase.firestore.Query,
  batchSize: number
) {
  let deletedItems: string[] = [];
  let snapshot = await query.get();
  if (snapshot.size === 0) {
    return deletedItems;
  }
  const batch = this.db.firestore.batch();
  snapshot.docs.forEach(doc => {
    deletedItems.push(doc.ref.path);
    batch.delete(doc.ref);
  });
  await batch.commit();
  if (snapshot.size === 0) {
    return deletedItems;
  }
  const result = await this.deleteQueryBatch(query, batchSize);
  return [...deletedItems, ...result];
}
Another solution using Node.js Admin SDK with Batch.
const traverseDocumentRecursively = async (
  docRef: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>,
  accumulatedRefs: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>[],
) => {
  const collections = await docRef.listCollections();
  if (collections.length > 0) {
    for (const collection of collections) {
      const snapshot = await collection.get();
      for (const doc of snapshot.docs) {
        accumulatedRefs.push(doc.ref);
        await traverseDocumentRecursively(doc.ref, accumulatedRefs);
      }
    }
  }
};

import { chunk } from 'lodash';

const doc = admin.firestore().collection('users').doc('001');
const accumulatedRefs: FirebaseFirestore.DocumentReference<FirebaseFirestore.DocumentData>[] = [];
await traverseDocumentRecursively(doc, accumulatedRefs);
await Promise.all(
  // Each transaction or batch of writes can write to a maximum of 500 documents
  chunk(accumulatedRefs, 500).map((chunkedRefs) => {
    const batch = admin.firestore().batch();
    for (const ref of chunkedRefs) {
      batch.delete(ref);
    }
    return batch.commit();
  }),
);
Not sure if this is helpful for anyone here, but I frequently face the error "Fatal error deleting docs <list of docs>" when using the firebase-tools.firestore.delete method (firebase-tools version 9.22.0).
I am currently handling these deletion failures using the returned error message, in order to avoid rewriting the code cited in Oleg Bondarenko's answer. It uses admin.firestore to effectively delete the failed docs.
It's a poor solution since it relies on the error message, but at least it doesn't force us to copy the whole FirestoreDelete code just to modify a few lines of it:
firebase_tools.firestore
  .delete(path, {
    project: JSON.parse(process.env.FIREBASE_CONFIG!).projectId,
    recursive: true,
    yes: true,
    token: getToken(),
  })
  .catch((err: Error) => {
    if (err.name == "FirebaseError") {
      // If recursive delete fails to delete some of the documents,
      // parse the failures from the error message and delete them manually
      const failedDeletingDocs = err.message.match(
        /.*Fatal error deleting docs ([^\.]+)/
      );
      if (failedDeletingDocs) {
        const docs = failedDeletingDocs[1].split(", ");
        const docRefs = docs.map((doc) =>
          firestore.doc(doc.slice(doc.search(/\(default\)\/documents/) + 19))
        );
        firestore
          .runTransaction(async (t) => {
            docRefs.forEach((doc) => t.delete(doc));
            return docs;
          })
          .then((docs) =>
            console.log(
              "Successfully deleted docs after failing: " + docs.join(", ")
            )
          )
          .catch((err) => console.error(err));
      }
    }
  });
If you are looking to delete user data, a solution to consider in 2022 is the Delete User Data Firebase Extension.
Once this is active, you can simply delete the user from Firebase Auth to trigger the recursive deletion of the user documents:
import admin from "firebase-admin";
admin.auth().deleteUser(userId);
You can call firebase.firestore().doc("whatever").set({...}) with a fresh object and that will overwrite everything in that document.
The only way .set does not erase everything is if you set the merge flag to true.
See the Firestore documentation on Add Data:
var cityRef = db.collection('cities').doc('BJ');

var setWithMerge = cityRef.set({
  capital: true
}, { merge: true });
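For contrast, a sketch of the same call without the merge flag, which replaces the entire document (note that subcollections are not touched by set(), so this does not cover the recursive-delete part):

// Without { merge: true }, set() replaces the whole document,
// wiping any fields not listed here.
var setWithoutMerge = cityRef.set({
  capital: true
});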

Send push notifications in nodejs using fcm node module

I want to send push notifications to a device. I have the device ID; how can I send notifications to that device?
I have used FCM to send notifications to the users. I used MongoDB, Node.js and the fcm-push node module.
Please see the code below; I hope it will help you.
deviceModel.js :
var mongoose = require('mongoose');
var Schema = require('mongoose').Schema;

var schema = new Schema({
  userId: { type: Schema.Types.ObjectId },
  userType: { type: String },
  deviceType: { type: String, required: true, enum: ['apn', 'gcm'] },
  deviceId: { type: String, required: true },
  timestamp: { type: Date, default: Date.now }
});

module.exports = mongoose.model('Device', schema);
push.js:
var pushModule = require('./push');
var DeviceModel = require('./DeviceModel');
var highland = require('highland');
var FCM = require('fcm-push');
var serverKey = config.get('fcmServerKey');
var fcm = new FCM(serverKey);
var _ = require('lodash');

/**
 * Handle fcm push results and updates/invalidates device ids.
 *
 * @param {String[]} deviceIds - array of device ids used in send.
 * @param {Object} fcmResponse - fcm send response.
 */
function handleFcmSendResult(deviceIds, fcmResponse) {
  // fcm results
  var results = fcmResponse.results;
  // changes for deviceIds
  var fcmUpdated = [];
  var fcmDeleted = [];
  // process results
  for (var i = 0; i < results.length; i++) {
    var oldId = deviceIds[i];
    var deviceResult = results[i];
    var msgId = deviceResult.message_id;
    var newId = deviceResult.registration_id;
    if (_.isString(msgId) && _.isString(newId)) {
      // If registration_id is set, replace the original ID with the new value
      fcmUpdated.push({ oldId: oldId, newId: newId });
    } else {
      // Otherwise, get the value of error
      var e = deviceResult.error;
      if (e === 'Unavailable') {
        winston.warn('Push: FCM: Feedback: device unavailable: %s.', oldId);
      } else if (e === 'NotRegistered' || e === 'InvalidRegistration') {
        // delete invalid devices
        fcmDeleted.push(oldId);
      }
    }
  }
  // apply changes, in bulk
  var bulkOp = DeviceModel.collection.initializeUnorderedBulkOp();
  if (fcmUpdated.length > 0) {
    fcmUpdated.forEach(function (upd) {
      bulkOp.find({ deviceId: upd.oldId }).update({ deviceId: upd.newId, timestamp: Date.now() });
    });
    // those old ids that are updated, need not be deleted
    fcmDeleted = _.difference(fcmDeleted, _.map(fcmUpdated, _.property('oldId')));
  }
  if (fcmDeleted.length > 0) {
    bulkOp.find({ deviceId: { '$in': fcmDeleted } }).remove();
  }
  console.log(bulkOp);
  // run bulk op
  bulkOp.execute();
}

/**
 * Dispatch FCM push to device ids.
 *
 * @param {String[]} deviceIds - array of apn device ids.
 * @param {String} eventName - event name.
 * @param {*} eventData - event data.
 */
function sendFcm(deviceIds, eventName, eventData) {
  // payload
  var msgOpts = {
    priority: 'high',
    registration_ids: deviceIds,
    data: _.set(eventData, 'eventName', eventName),
    notification: eventData,
    content_available: true,
    mutable_content: true
  };
  fcm.send(msgOpts)
    .then(function (response) {
      console.log("SENT :", response);
      // handleFcmSendResult(deviceIds, JSON.parse(response));
    })
    .catch(function (err) {
      winston.error('Push: FCM: Error sending push.', err);
    });
}

/**
 * Sends push notifications to Device docs emitted from stream.
 *
 * @param {Stream.Readable} docStream - Stream of device docs.
 * @param {String} eventName - event name.
 * @param {*} eventData - event data.
 */
function streamSend(docStream, eventName, eventData) {
  // stream for fcm
  var fcmStream = highland();
  // batch device ids from sub stream and send to fcm
  fcmStream.batch(1000).each(function (fcmIds) {
    sendFcm(fcmIds, eventName, eventData);
  });
  // split source to sub streams
  highland(docStream).each(function (doc) {
    fcmStream.write(doc.deviceId);
  }).done(function () {
    // end sub streams when doc source is done
    fcmStream.end();
  });
}

/**
 * Sends the event via push to all registered devices.
 * @param {String} eventName - event name.
 * @param {Object} eventData - event data. Can contain a "notification" object with: title, description and icon.
 */
var pushToPublic = function (eventName, eventData) {
  var str = DeviceModel.find().cursor();
  streamSend(str, eventName, eventData);
};

/**
 * Sends the event via push to devices that are mapped to given user ids.
 * @param {ObjectId[]} userIds - array of user ids.
 * @param {String} eventName - event name.
 * @param {Object} eventData - event data. Can contain a "notification" object with: title, description and icon.
 */
var pushToUserIds = function (userIds, eventName, eventData) {
  var str = DeviceModel.find({ userId: { '$in': userIds } }).cursor();
  streamSend(str, eventName, eventData);
};

// Send notification test function
var sendNotification = function () {
  var payload = {
    "updatedAt": "2017-06-17T06:12:42.975Z",
    "message": "this is notification message",
    "typeId": "591452ecad4c6b71bed61089",
    "userId": "5912d45f29945b6d649f287e",
    "_id": "5913f90d08b4d213f1ded021",
    "isRead": false,
    "isPublic": true,
    // ORDER DELIVERED
    "type": "order",
    "title_loc_key": "title_order_delivered",
    "title_loc_args": ["OrderValue"],
    "body_loc_key": "body_order_delivered",
    "body_loc_args": ["reminderValue"],
  };
  // pushToPublic("testEvent", payload);
  pushToUserIds(['59562201a544614d47845eef'], "testEvent", payload);
};

sendNotification();
Result:
SENT: {
  "multicast_id": 1234567891234567890,
  "success": 1,
  "failure": 0,
  "canonical_ids": 0,
  "results": [{
    "message_id": "0:12345678912345678912345678912345678"
  }]
}
You can use the Node.js package node-gcm.
For more, please check these links; they will help you solve your problem:
node package1
or
node package2
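For reference, a minimal node-gcm sketch for sending to a single device ID; the server key and registration token below are placeholders:

var gcm = require('node-gcm');

// FCM server key from the Firebase console (placeholder).
var sender = new gcm.Sender('YOUR_FCM_SERVER_KEY');

var message = new gcm.Message({
  priority: 'high',
  notification: {
    title: 'Hello',
    body: 'World'
  }
});

// Send to the device ID you already have.
sender.send(message, { registrationTokens: ['DEVICE_ID'] }, function (err, response) {
  if (err) console.error(err);
  else console.log(response);
});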

SmoothStreaming issues on Chromecast

I'm trying to load SmoothStreaming media into the Chromecast. For that I've used the samples provided by Google:
<body>
<video id='vid' />
<script type="text/javascript"
src="//www.gstatic.com/cast/sdk/libs/receiver/2.0.0/cast_receiver.js">
</script>
<script type="text/javascript"
src="//www.gstatic.com/cast/sdk/libs/mediaplayer/0.3.0/media_player.js">
</script>
<script type="text/javascript">
window.onload = function() {
  // If you set ?Debug=true in the URL, such as a different App ID in the
  // developer console, include debugging information.
  cast.receiver.logger.setLevelValue(cast.receiver.LoggerLevel.DEBUG);

  var mediaElement = document.getElementById('vid');
  // Create the media manager. This will handle all media messages by default.
  window.mediaManager = new cast.receiver.MediaManager(mediaElement);

  // Remember the default value for the Receiver onLoad, so this sample can Play
  // non-adaptive media as well.
  window.defaultOnLoad = mediaManager.onLoad;
  mediaManager.onLoad = function (event) {
    var initStart = event.data['currentTime'] || 0;
    var autoplay = event.data['autoplay'] || true;
    var url = event.data['media']['contentId'];
    var protocol = null;
    var ext = url.substring(url.lastIndexOf('.'), url.length);

    mediaElement.autoplay = autoplay; // Make sure autoplay gets set

    var host = new cast.player.api.Host({
      'mediaElement': mediaElement,
      'url': url
    });

    if (url.lastIndexOf('.m3u8') >= 0) {
      // HTTP Live Streaming
      protocol = cast.player.api.CreateHlsStreamingProtocol(host);
    } else if (url.lastIndexOf('.mpd') >= 0) {
      // MPEG-DASH
      protocol = cast.player.api.CreateDashStreamingProtocol(host);
    } else if (url.indexOf('.ism/') >= 0) {
      // Smooth Streaming
      protocol = cast.player.api.CreateSmoothStreamingProtocol(host);
    }

    // How to override a method in Host. I know that it's safe to just provide
    // this method.
    host.onError = function(errorCode) {
      console.log("Fatal Error - " + errorCode);
      window.player.unload();
    };

    // If you need cookies, then set withCredentials = true and also set any
    // header information you need. If you don't need them, there can be some
    // unexpected effects by setting this value.
    // host.updateSegmentRequestInfo = function(requestInfo) {
    //   requestInfo.withCredentials = true;
    // };

    console.log("we have protocol " + ext);

    if (protocol !== null) {
      console.log("Starting Media Player Library");
      window.player = new cast.player.api.Player(host);
      window.player.load(protocol, initStart);
    } else {
      // Fall back to the default onLoad for non-adaptive media.
      window.defaultOnLoad(event);
    }
  };
};
</script>
</body>
And on the sender:
/**
 * global variables
 */
var currentMediaSession = null;
var currentVolume = 0.5;
var progressFlag = 1;
var mediaCurrentTime = 0;
var session = null;
var mediaURLs = [
  'http://playready.directtaps.net/smoothstreaming/TTLSS720VC1/To_The_Limit_720.ism/',
  'http://commondatastorage.googleapis.com/gtv-videos-bucket/ED_1280.mp4',
  'http://commondatastorage.googleapis.com/gtv-videos-bucket/tears_of_steel_1080p.mov',
  'http://commondatastorage.googleapis.com/gtv-videos-bucket/reel_2012_1280x720.mp4',
  'http://commondatastorage.googleapis.com/gtv-videos-bucket/Google%20IO%202011%2045%20Min%20Walk%20Out.mp3'];
var mediaTitles = [
  'Big Buck Bunny',
  'Elephant Dream',
  'Tears of Steel',
  'Reel 2012',
  'Google I/O 2011 Audio'];
var mediaThumbs = [
  'images/bunny.jpg',
  'images/ed.jpg',
  'images/Tears.jpg',
  'images/reel.jpg',
  'images/google-io-2011.jpg'];
var currentMediaURL = mediaURLs[0];

/**
 * Call initialization
 */
if (!chrome.cast || !chrome.cast.isAvailable) {
  setTimeout(initializeCastApi, 1000);
}

/**
 * initialization
 */
function initializeCastApi() {
  // default app ID to the default media receiver app
  // optional: you may change it to your own app ID/receiver
  var applicationID = '21176C05';
  var sessionRequest = new chrome.cast.SessionRequest(applicationID);
  var apiConfig = new chrome.cast.ApiConfig(sessionRequest,
    sessionListener,
    receiverListener);
  chrome.cast.initialize(apiConfig, onInitSuccess, onError);
}

/**
 * initialization success callback
 */
function onInitSuccess() {
  appendMessage("init success");
}

/**
 * initialization error callback
 */
function onError() {
  console.log("error");
  appendMessage("error");
}

/**
 * generic success callback
 */
function onSuccess(message) {
  console.log(message);
}

/**
 * callback on success for stopping app
 */
function onStopAppSuccess() {
  console.log('Session stopped');
  appendMessage('Session stopped');
  document.getElementById("casticon").src = 'images/cast_icon_idle.png';
}

/**
 * session listener during initialization
 */
function sessionListener(e) {
  console.log('New session ID: ' + e.sessionId);
  appendMessage('New session ID:' + e.sessionId);
  session = e;
  if (session.media.length != 0) {
    appendMessage(
      'Found ' + session.media.length + ' existing media sessions.');
    onMediaDiscovered('onRequestSessionSuccess_', session.media[0]);
  }
  session.addMediaListener(
    onMediaDiscovered.bind(this, 'addMediaListener'));
  session.addUpdateListener(sessionUpdateListener.bind(this));
}

/**
 * session update listener
 */
function sessionUpdateListener(isAlive) {
  var message = isAlive ? 'Session Updated' : 'Session Removed';
  message += ': ' + session.sessionId;
  appendMessage(message);
  if (!isAlive) {
    session = null;
  }
}

/**
 * receiver listener during initialization
 */
function receiverListener(e) {
  if (e === 'available') {
    console.log("receiver found");
    appendMessage("receiver found");
  } else {
    console.log("receiver list empty");
    appendMessage("receiver list empty");
  }
}

/**
 * select a media URL
 * @param {string} m An index for media URL
 */
function selectMedia(m) {
  console.log("media selected" + m);
  appendMessage("media selected" + m);
  currentMediaURL = mediaURLs[m];
  var playpauseresume = document.getElementById("playpauseresume");
  document.getElementById('thumb').src = mediaThumbs[m];
}

/**
 * launch app and request session
 */
function launchApp() {
  console.log("launching app...");
  appendMessage("launching app...");
  chrome.cast.requestSession(onRequestSessionSuccess, onLaunchError);
}

/**
 * callback on success for requestSession call
 * @param {Object} e A non-null new session.
 */
function onRequestSessionSuccess(e) {
  console.log("session success: " + e.sessionId);
  appendMessage("session success: " + e.sessionId);
  session = e;
  document.getElementById("casticon").src = 'images/cast_icon_active.png';
}

/**
 * callback on launch error
 */
function onLaunchError() {
  console.log("launch error");
  appendMessage("launch error");
}

/**
 * stop app/session
 */
function stopApp() {
  session.stop(onStopAppSuccess, onError);
}

/**
 * load media
 * @param {string} i An index for media
 */
function loadMedia(i) {
  if (!session) {
    console.log("no session");
    appendMessage("no session");
    return;
  }
  console.log("loading..." + currentMediaURL);
  appendMessage("loading..." + currentMediaURL);
  var mediaInfo = new chrome.cast.media.MediaInfo(currentMediaURL);
  mediaInfo.contentType = 'application/vnd.ms-sstr+xml';

  var request = new chrome.cast.media.LoadRequest(mediaInfo);
  request.autoplay = false;
  request.currentTime = 0;

  var payload = {
    "title": mediaTitles[i],
    "thumb": mediaThumbs[i]
  };

  var json = {
    "payload": payload
  };

  request.customData = json;

  session.loadMedia(request,
    onMediaDiscovered.bind(this, 'loadMedia'),
    onMediaError);
}

/**
 * callback on success for loading media
 * @param {Object} mediaSession A non-null media object
 */
function onMediaDiscovered(how, mediaSession) {
  console.log("new media session ID:" + mediaSession.mediaSessionId);
  appendMessage("new media session ID:" + mediaSession.mediaSessionId + ' (' + how + ')');
  currentMediaSession = mediaSession;
  mediaSession.addUpdateListener(onMediaStatusUpdate);
  mediaCurrentTime = currentMediaSession.currentTime;
  playpauseresume.innerHTML = 'Play';
  document.getElementById("casticon").src = 'images/cast_icon_active.png';
}

/**
 * callback on media loading error
 * @param {Object} e A non-null media object
 */
function onMediaError(e) {
  console.log("media error");
  appendMessage("media error");
  document.getElementById("casticon").src = 'images/cast_icon_warning.png';
}

/**
 * callback for media status event
 */
function onMediaStatusUpdate(isAlive) {
  if (progressFlag) {
    document.getElementById("progress").value = parseInt(100 * currentMediaSession.currentTime / currentMediaSession.media.duration);
  }
  document.getElementById("playerstate").innerHTML = currentMediaSession.playerState;
}

/**
 * play media
 */
function playMedia() {
  if (!currentMediaSession)
    return;

  var playpauseresume = document.getElementById("playpauseresume");
  if (playpauseresume.innerHTML == 'Play') {
    currentMediaSession.play(null,
      mediaCommandSuccessCallback.bind(this, "playing started for " + currentMediaSession.sessionId),
      onError);
    playpauseresume.innerHTML = 'Pause';
    //currentMediaSession.addListener(onMediaStatusUpdate);
    appendMessage("play started");
  } else {
    if (playpauseresume.innerHTML == 'Pause') {
      currentMediaSession.pause(null,
        mediaCommandSuccessCallback.bind(this, "paused " + currentMediaSession.sessionId),
        onError);
      playpauseresume.innerHTML = 'Resume';
      appendMessage("paused");
    } else {
      if (playpauseresume.innerHTML == 'Resume') {
        currentMediaSession.play(null,
          mediaCommandSuccessCallback.bind(this, "resumed " + currentMediaSession.sessionId),
          onError);
        playpauseresume.innerHTML = 'Pause';
        appendMessage("resumed");
      }
    }
  }
}

/**
 * stop media
 */
function stopMedia() {
  if (!currentMediaSession)
    return;

  currentMediaSession.stop(null,
    mediaCommandSuccessCallback.bind(this, "stopped " + currentMediaSession.sessionId),
    onError);
  var playpauseresume = document.getElementById("playpauseresume");
  playpauseresume.innerHTML = 'Play';
  appendMessage("media stopped");
}

/**
 * set media volume
 * @param {Number} level A number for volume level
 * @param {Boolean} mute A true/false for mute/unmute
 */
function setMediaVolume(level, mute) {
  if (!currentMediaSession)
    return;

  var volume = new chrome.cast.Volume();
  volume.level = level;
  currentVolume = volume.level;
  volume.muted = mute;
  var request = new chrome.cast.media.VolumeRequest();
  request.volume = volume;
  currentMediaSession.setVolume(request,
    mediaCommandSuccessCallback.bind(this, 'media set-volume done'),
    onError);
}

/**
 * mute media
 * @param {Object} cb A checkbox DOM element
 */
function muteMedia(cb) {
  if (cb.checked == true) {
    document.getElementById('muteText').innerHTML = 'Unmute media';
    setMediaVolume(currentVolume, true);
    appendMessage("media muted");
  } else {
    document.getElementById('muteText').innerHTML = 'Mute media';
    setMediaVolume(currentVolume, false);
    appendMessage("media unmuted");
  }
}

/**
 * seek media position
 * @param {Number} pos A number to indicate percent
 */
function seekMedia(pos) {
  console.log('Seeking ' + currentMediaSession.sessionId + ':' +
    currentMediaSession.mediaSessionId + ' to ' + pos + "%");
  progressFlag = 0;
  var request = new chrome.cast.media.SeekRequest();
  request.currentTime = pos * currentMediaSession.media.duration / 100;
  currentMediaSession.seek(request,
    onSeekSuccess.bind(this, 'media seek done'),
    onError);
}

/**
 * callback on success for media seek
 * @param {string} info A message string
 */
function onSeekSuccess(info) {
  console.log(info);
  appendMessage(info);
  setTimeout(function() { progressFlag = 1; }, 1500);
}

/**
 * callback on success for media commands
 * @param {string} info A message string
 */
function mediaCommandSuccessCallback(info) {
  console.log(info);
  appendMessage(info);
}

/**
 * append message to debug message window
 * @param {string} message A message string
 */
function appendMessage(message) {
  var dw = document.getElementById("debugmessage");
  dw.innerHTML += '\n' + JSON.stringify(message);
}
I am trying media #1, which is the .ism one - I also tried appending /Manifest - but no luck.
In the debugging console I am getting:
[214.054s] [cast.player.api.Player] load -3
media_player.js:23
[214.060s] [goog.net.XhrIo] Opening Xhr [GET http://playready.directtaps.net/smoothstreaming/TTLSS720VC1/To_The_Limit_720.ism/Manifest -1]
media_player.js:23
[214.065s] [goog.net.XhrIo] Will abort after 10000ms if incomplete, xhr2 false [GET http://playready.directtaps.net/smoothstreaming/TTLSS720VC1/To_The_Limit_720.ism/Manifest -1]
media_player.js:23
[214.070s] [goog.net.XhrIo] Sending request [GET http://playready.directtaps.net/smoothstreaming/TTLSS720VC1/To_The_Limit_720.ism/Manifest -1]
media_player.js:23
[214.088s] [goog.net.XhrIo] Request complete [GET http://playready.directtaps.net/smoothstreaming/TTLSS720VC1/To_The_Limit_720.ism/Manifest 200]
media_player.js:23
[214.120s] [cast.player.api.Player] sourceopen
media_player.js:23
[214.142s] [cast.player.api.Player] play
media_player.js:23
[214.291s] [cast.receiver.MediaManager] Load metadata error
cast_receiver.js:19
[214.296s] [cast.receiver.MediaManager] Resetting media element
cast_receiver.js:19
[214.303s] [cast.receiver.MediaManager] Sending error message to 4:client-34217
cast_receiver.js:19
[214.307s] [cast.receiver.IpcChannel] IPC message sent: {"namespace":"urn:x-cast:com.google.cast.media","senderId":"4:client-34217","data":"{\"requestId\":9217017,\"type\":\"LOAD_FAILED\"}"}
cast_receiver.js:19
[214.171s] [cast.player.api.Player] error
media_player.js:23
Fatal Error - 1 index.html:61
[214.176s] [cast.player.api.Player] unload -3
Any idea???
Thanks!
Where did you find that URL? It's incomplete; the right one is http://playready.directtaps.net/smoothstreaming/TTLSS720VC1/To_The_Limit_720.ism/manifest, but it is unlikely to work. Other files have been posted at Microsoft's SmoothStreaming sample files. That said, I don't believe that Microsoft is providing a CORS header that works for Chromecast.
We do provide some very simple media that you can host yourself. And we provide a very simple server, if you need that to host and provide a CORS header as well.
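If you host the media yourself, note that the Media Player Library fetches manifests and segments via XHR, so your server must send a CORS header. A minimal sketch of a static server that does this (for local testing only; no path sanitization):

var http = require('http');
var fs = require('fs');
var path = require('path');

http.createServer(function (req, res) {
  var file = path.join(__dirname, 'media', req.url);
  fs.readFile(file, function (err, data) {
    if (err) {
      res.writeHead(404);
      return res.end();
    }
    // The header the Chromecast receiver needs to load cross-origin media.
    res.writeHead(200, { 'Access-Control-Allow-Origin': '*' });
    res.end(data);
  });
}).listen(8080);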

Workflow of my module

I'm writing my first "serious" Node.js module and I need some advice/best practices on a couple of things...
Please read comments in the code for the questions!
server.js
var Client = require('./index');
var client = Client.createClient();

client.on('data', function(data) {
  console.log(data);
});

client.on('ERROR', function(data) {
  console.log(data);
});
node-module.js
exports.Client = require('./Client');

exports.createClient = function(options) {
  return new exports.Client(options);
};

/**
 * Question 1:
 * I want to have an optional WebUI for this, how should it receive the data?
 * I figured via event emitters is best(?)
 * But where should it be placed (and how) so it can access the events from Client.js?
 * (See the sketch after this file.)
 */
exports.createServer = function(options) {
  //...
};
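On Question 1, a hedged sketch: since Client is an EventEmitter, createServer can take a client and relay its events to the WebUI. Here this is done with server-sent events over the built-in http module; the route and event names are assumptions:

var http = require('http');

exports.createServer = function(client, options) {
  return http.createServer(function(req, res) {
    res.writeHead(200, { 'Content-Type': 'text/event-stream' });
    // Relay every 'data' event from the client to the connected browser.
    client.on('data', function(data) {
      res.write('data: ' + JSON.stringify(data) + '\n\n');
    });
  });
};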
Client.js
var net = require('net');
var util = require('util');
var EventEmitter = require('events').EventEmitter;

Client.Handlers = require('./Handlers');

module.exports = Client;

function Client(options) {
  EventEmitter.call(this);
  // ...
  this._callback_handlers['ERROR'] = Client.Handlers.error;

  /**
   * Question 2:
   * Initialize connection to server
   *
   * Should it be something like this instead:
   *   this._socket = this.Connect(options); ?
   * How do I achieve this?
   * And at the same time keep the buffer variable local to the Connect function?
   */
  this.Connect();
}

util.inherits(Client, EventEmitter);

Client.prototype.Connect = function() {
  var self = this;
  var buffer = '';
  this._socket = net.connect({
    host: self.options.host,
    port: self.options.port
  }, function() {
  })
  .on('data', function(chunk) {
    var offset, part;
    buffer += chunk;

    /**
     * Question 3:
     * Buffer up and parse only whole rows
     *
     * This is an OK way to do it, right?
     * Should I listen for the event "data"
     * instead of calling self.parse_response() like a normal function?
     */
    while ((offset = buffer.indexOf("\n")) > -1) {
      part = buffer.substr(0, offset);
      buffer = buffer.substr(offset + 1);
      if (part) {
        self.emit('data', part);
        self.parse_response(part);
      }
    }
  });
};

Client.prototype.parse_response = function(data) {
  // ...

  /**
   * Question 4:
   * Send callbacks
   *
   * _callback_handlers is an object containing callback functions
   * (see top of Client.js: this._callback_handlers['ERROR'] = Client.Handlers.error)
   *
   * I would prefer to use something like this instead:
   *   this.emit(data[0], data[1]);
   *
   * But how do I make Handlers.js listen to these events,
   * since "Client" doesn't exist there?
   *
   * The reason I have Handlers.js in the first place
   * is to give the code a better structure.
   * (One option is sketched after Handlers.js below.)
   */
  if (this._callback_handlers[data[0]]) {
    this._callback_handlers[data[0]].apply(this, data[1]);
  }
};
Handlers.js
//...
exports.error = function(a) {
  this.emit('ERROR', a);
};
//...
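On Question 4, a hedged sketch of one way around the "Client doesn't exist in Handlers.js" problem: export a register function that receives the client and subscribes to its events, so parse_response can simply do this.emit(data[0], data[1]):

// Handlers.js (hypothetical event-based variant)
exports.register = function(client) {
  client.on('ERROR', function(a) {
    // Handle or transform the error here, then log/forward it.
    console.error('server error:', a);
  });
};

// Client.js - inside the constructor, after the EventEmitter setup:
//   Client.Handlers.register(this);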
