Cannot delete files from firebase collection - node.js

I am following the example listed here, except with modifications due to the API of the new firebase-tools.
exports.clearMessages = functions.runWith({ timeoutSeconds: 540, memory: '2GB' }).https.onCall(messagesController.clearMessages)

export const clearMessages = async (data, context) => {
  const uid = context.auth.uid
  const path = `users/${uid}/messages`
  return firebase_tools.firestore.delete('flightApp3', path, {
    recursive: true,
    shallow: true,
    allCollections: true
  }).then(result => {
    console.log('delete result', result)
    return result
  })
}
However, when I run this, I see the following in the Cloud Functions log:
Unhandled error { Error
at Error.FirebaseError (/user_code/node_modules/firebase-tools/lib/error.js:9:18)
at module.exports (/user_code/node_modules/firebase-tools/lib/getProjectId.js:10:19)
at Command.module.exports (/user_code/node_modules/firebase-tools/lib/requirePermissions.js:11:21)
at /user_code/node_modules/firebase-tools/lib/command.js:154:38
at process._tickDomainCallback (internal/process/next_tick.js:135:7)
name: 'FirebaseError',
message: 'No project active. Run with \u001b[1m--project <projectId>\u001b[22m or define an alias by\nrunning \u001b[1mfirebase use --add\u001b[22m',
children: [],
status: 500,
exit: 1,
stack: 'Error\n at Error.FirebaseError (/user_code/node_modules/firebase-tools/lib/error.js:9:18)\n at module.exports (/user_code/node_modules/firebase-tools/lib/getProjectId.js:10:19)\n at Command.module.exports (/user_code/node_modules/firebase-tools/lib/requirePermissions.js:11:21)\n at /user_code/node_modules/firebase-tools/lib/command.js:154:38\n at process._tickDomainCallback (internal/process/next_tick.js:135:7)',
original: undefined,
context: undefined }
However, I'm pretty sure I have an active project in my firebase CLI.
$ firebase use
Active Project: production (flightApp3)
Project aliases for /Users/myUser/Developer/flightApp3/cloud:
* default (flightApp3)
* production (flightApp3)
Run firebase use --add to define a new project alias.

Some of those options cannot be mixed (the help text further down shows that recursive and shallow exclude each other), so try:
return firebase_tools.firestore.delete('flightApp3', path, {
  // allCollections: true,
  recursive: true,
  yes: true
}).then(() => {
  return {
    path: path
  };
});
That is how the path is built up (path and allCollections also do not make sense together): projects/${project}/databases/(default)/documents/users/${uid}/messages
getProjectId.js checks for rc.projects (where options.project corresponds to the --project option):
module.exports = function(options, allowNull) {
  if (!options.project && !allowNull) {
    var aliases = _.get(options, "rc.projects", {});
    ...
These rc.projects entries are the projects from the .firebaserc file:
{
  "projects": {
    "default": "flightApp3"
  }
}
Alternatively, run firebase use default to switch from the production alias to default (or remove the production alias once, for a test). FirestoreDelete(project, path, options) also no longer cares about options.token or options.project (despite what the documentation suggests).
$ firebase firestore:delete --help explains the command-line options:
Usage: firestore:delete [options] [path]
Delete data from Cloud Firestore.
Options:
-r, --recursive Recursive. Delete all documents and sub-collections.
Any action which would result in the deletion of child
documents will fail if this argument is not passed.
May not be passed along with --shallow.
--shallow Shallow. Delete only parent documents and ignore documents
in sub-collections. Any action which would orphan documents
will fail if this argument is not passed.
May not be passed along with --recursive.
--all-collections Delete all. Deletes the entire Firestore database,
including all collections and documents.
Any other flags or arguments will be ignored.
-y, --yes No confirmation. Otherwise, a confirmation prompt will appear.
The npm package (which produced the output above) is at version 6.0.1.
Just found a relevant comment (though possibly obsolete):
"The token must be set in the functions config, and can be generated at the command line by running firebase login:ci."
This hints at environment configuration, so that functions.config().fb.token holds the token:
firebase functions:config:set fb.token="THE TOKEN"
One can also obtain the projectId from process.env.FIREBASE_CONFIG (a JSON string that contains projectId).
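Putting those two pieces together, here is a minimal sketch of a callable function that passes the project id and the token explicitly. It is an assumption-laden illustration, not verbatim from the question: it assumes the CLI-style client API where the first argument is the path and the flags go in the options object, and that your firebase-tools version still reads options.project and options.token.

const functions = require('firebase-functions');
const firebase_tools = require('firebase-tools');

exports.clearMessages = functions
  .runWith({ timeoutSeconds: 540, memory: '2GB' })
  .https.onCall(async (data, context) => {
    const path = `users/${context.auth.uid}/messages`;
    // FIREBASE_CONFIG is a JSON string that includes projectId
    const projectId = JSON.parse(process.env.FIREBASE_CONFIG).projectId;
    return firebase_tools.firestore.delete(path, {
      project: projectId,                  // addresses the "No project active" error
      token: functions.config().fb.token,  // set via: firebase functions:config:set fb.token="..."
      recursive: true,
      yes: true
    });
  });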

Docs: https://firebase.google.com/docs/firestore/solutions/delete-collections
In the /functions directory, install firebase-tools:
"firebase-tools": "^7.16.2"
In the cloud function, import firebase-tools and call delete:
const firebaseTools = require("firebase-tools");
...
firebaseTools.firestore.delete(workspaceRef.path, {
  project: process.env.GCLOUD_PROJECT, // required
  recursive: true, // required
  yes: true // required
})
There is no need for a token when calling firebase-tools from a cloud function.
Also, the link to the API, https://github.com/firebase/firebase-tools/blob/v7.16.2/src/firestore/delete.js, with the FirestoreDelete implementation seems wrong: I'm successfully calling .delete(path, options), but that code says .delete(project, path, options)?
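For completeness, a sketch of the whole callable function around that call (names such as clearMessages and the users/{uid}/messages path are carried over from the question as an illustration; it assumes firebase-functions v3-era APIs and firebase-tools ^7.16.2 installed in /functions):

const functions = require('firebase-functions');
const firebaseTools = require('firebase-tools');

exports.clearMessages = functions
  .runWith({ timeoutSeconds: 540, memory: '2GB' })
  .https.onCall(async (data, context) => {
    const path = `users/${context.auth.uid}/messages`;
    // delete(path, options): no token needed when running inside a Cloud Function
    await firebaseTools.firestore.delete(path, {
      project: process.env.GCLOUD_PROJECT,
      recursive: true,
      yes: true
    });
    return { path };
  });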

Related

Evaluate a Cypress configuration file

In my build scripts I need to evaluate a Cypress configuration file. I'm using the following script:
let appdata = process.env.LOCALAPPDATA;
let version = `11.0.1`;
let src = `${appdata}/Cypress/Cache/${version}/Cypress/resources/app/node_modules/@packages/data-context/src`;
const DataContext = require(`${src}/DataContext.js`).DataContext;
const ProjectConfigManager = require(`${src}/data/ProjectConfigManager.js`).ProjectConfigManager;

(async () => {
  const ctx = new DataContext({
    schema: null,
    schemaCloud: null,
    modeOptions: "run",
    appApi: {},
    localSettingsApi: {},
    authApi: {},
    configApi: {},
    projectApi: {},
    electronApi: {},
    browserApi: {},
  })
  let configManager = new ProjectConfigManager({
    ctx,
    configFile: 'C:\\work\\sample\\sample.config.ts',
    projectRoot: 'C:\\work\\sample',
    handlers: [],
    hasCypressEnvFile: false,
    eventRegistrar: null /*new EventRegistrar()*/,
    onError: (error) => {},
    onInitialConfigLoaded: () => {},
    onFinalConfigLoaded: () => Promise.resolve(),
    refreshLifecycle: () => Promise.resolve(),
  })
  configManager.configFilePath = "sample.config.ts"
  configManager.setTestingType('e2e')
  let cfg = await configManager.getConfigFileContents()
  console.log(JSON.stringify(cfg));
})();
It works well with Cypress 10.
However, Cypress 11 has introduced some changes that break this script. Though I adjusted the paths, I'm still unable to make it work again.
It currently fails with this error:
Error: Cannot find module 'C:\Users\mbolotov\AppData\Local\Cypress\Cache\11.0.1\Cypress\resources\app\node_modules\graphql\index'. Please verify that the package.json has a valid "main" entry
How can I fix this problem (without making changes to the Cypress installation)?
OR
Is there any other way to evaluate a Cypress configuration file (say from the command line) and obtain its values?
The exact usage is unclear to me, but making some assumptions: a Node.js script in the /scripts folder of the project can compile and resolve the config using the Cypress Module API.
It needs a test to run; a "null test" can be generated from inside the script.
Note that the null test must conform to the spec pattern of the project (below it is the standard .cy.js).
const cypress = require('cypress')
const fs = require('fs')

fs.writeFileSync('../cypress/e2e/null-test.cy.js', 'it("", () => {})')

cypress.run({
  project: '..',
  spec: '../cypress/e2e/null-test.cy.js',
  quiet: true
}).then(results => {
  if (results.status === 'failed') {
    console.log(results)
  } else {
    console.log(results.config.resolved) // output resolved config
  }
})
I haven't attempted to duplicate everything you have in your code, as it uses Cypress internals that are not publicly documented.
This may be because of this entry in the 11.0.0 changelog:
We have also massively improved our startup performance by shipping a snapshot of our binary instead of the source files.
Looking inside the cache folder for v10.11.0 (${process.env.CYPRESS_CACHE_FOLDER}/10.11.0/Cypress/resources/app), the /node_modules is fully populated and /node_modules/graphql/index.js exists.
But in v11.0.1 /node_modules/graphql is not fully populated.
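A quick way to confirm that difference is a small Node check against the cache folder (a sketch; the cache-location fallback and the version numbers are assumptions, adjust them to your machine):

const fs = require('fs');
const path = require('path');

const cache = process.env.CYPRESS_CACHE_FOLDER || `${process.env.LOCALAPPDATA}/Cypress/Cache`;
for (const version of ['10.11.0', '11.0.1']) {
  const graphqlIndex = path.join(cache, version, 'Cypress/resources/app/node_modules/graphql/index.js');
  console.log(version, fs.existsSync(graphqlIndex)); // true for v10.x, false for v11.x
}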

How to create a custom blueprint?

I'm trying to create a customized JHipster blueprint for my organization.
I've started my journey:
Installed Yeoman v4.3.0
Installed Jhipster v7.9.3
Created a directory for my future blueprint: mkdir mygenerator && cd mygenerator
Executed the command to create a new blueprint: jhipster generate-blueprint
selected only the sub-generator server
add a cli: Y
Is server generator a side-by-side blueprint: Y
Is server generator a cli command: N
selected the tasks: initializing, prompting and configuring
From this point, I opened the generated blueprint project with VS Code and noticed a first problem: some JHipster packages can't be resolved:
Unable to resolve path to module 'generator-jhipster/esm/generators/server'
Unable to resolve path to module 'generator-jhipster/esm/priorities'
I also noticed that the generator created for me has a small difference from the existing generators on the JHipster GitHub, such as jhipster-dotnetcore, generator-jhipster-quarkus, and generator-jhipster-nodejs: the returned functions are async, while in the cited repos they are regular (sync) functions:
get [INITIALIZING_PRIORITY]() {
  return {
    async initializingTemplateTask() {},
  };
}
Does it make any difference in this JHipster version, or is there no problem if I return them the same way as jhipster-dotnetcore does?
get initializing() {
  return {
    ...super._initializing(),
    setupServerConsts() {
      this.packagejs = packagejs;
      ...
I've assumed that this detail is not important, kept the async function, and wrote my prompting function to get some input from the user/developer in order to replace values in the template files:
get [PROMPTING_PRIORITY]() {
  return {
    ...super._prompting(),
    async promptingTemplateTask() {
      const choices = [
        {
          name: 'OAuth 2.0 Protocol',
          value: 'oauth2',
        },
        {
          name: 'CAS Protocol',
          value: 'cas',
        },
      ];
      const PROMPTS = {
        type: 'list',
        name: 'authenticationProtocol',
        message: 'Which authentication protocol do you want to use?',
        choices,
        default: 'oauth2',
      };
      const done = this.async();
      if (choices.length > 0) {
        this.prompt(PROMPTS).then(prompt => {
          this.authenticationProtocol = this.jhipsterConfig.authenticationProtocol = prompt.authenticationProtocol;
          done();
        });
      } else {
        done();
      }
    },
  };
}
<%_ if (authenticationProtocol == 'oauth2') { _%>
security:
  enable-csrf: true
  oauth2:
    client:
      clientId: ${this.baseName}
      clientSecret: Z3ByZXBmdGVy
      accessTokenUri: http://localhost:8443/oauth2.0/accessToken
      userAuthorizationUri: http://localhost:8443/oauth2.0/authorize
      tokenName: oauth_token
      authenticationScheme: query
      clientAuthenticationScheme: form
      logoutUri: http://localhost:8443/logout
      clientSuccessUri: http://localhost:4200/#/login-success
    resource:
      userInfoUri: http://localhost:8443/oauth2.0/profile
<%_ } _%>
thymeleaf:
  mode: HTML
templates/src/test/java/resources/config/application.yml.ejs
With all this done, I followed the next steps:
Ran npm link inside the blueprint directory.
Created a new directory for an example app: mkdir appmygenerator && cd appmygenerator
Started a new example app with my blueprint: jhipster --blueprint mygenerator --skip-git --skip-install --skip-user-management --skip-client, answering all the questions.
Here I got some surprises:
After answering What is the base name of your application? I got this warning: [DEP0148] DeprecationWarning: Use of deprecated folder mapping "./lib/util/" in the "exports" field module resolution of the package at /...<my-generator-path>/node_modules/yeoman-environment/package.json. Update this package.json to use a subpath pattern like "./lib/util/*"
My prompting function somehow made some questions repeat, from Do you want to make it reactive with Spring WebFlux? until Which other technologies would you like to use?.
When my prompt was finally shown, there was a message appended to the last option: CAS Protocol Run-async wrapped function (sync) returned a promise but async() callback must be executed to resolve
I made some changes to my prompt function: I removed the call to super._prompting() hoping to solve item 2, and removed the async hoping to solve item 3.
Well... apparently it was solved. But I get a new error when JHipster (or Yeoman) tries to process the template:
An error occured while running jhipster:server#writeFiles
ERROR! /home/fabianorodrigo/Downloads/my-blueprint/generators/server/templates/src/test/resources/config/application.yml.ejs:47
45| favicon:
46| enabled: false
>> 47| <%_ if (authenticationProtocol == 'oauth2') { _%>
48| security:
49| enable-csrf: true
50| oauth2:
authenticationProtocol is not defined
How come authenticationProtocol is not defined? I'm stuck here. What I did notice is that, in all the JHipster generators cited above, the prompt function sets properties like this.[property] = [value] and this.jhipsterConfig.[property] = [value], and in the templates they are referenced by the property name alone, and it works.
What am I missing? Why, even though I set this.authenticationProtocol in the prompting function, is it not seen in the template?
Yeoman (yo/yeoman-generator/yeoman-environment) is not required and should not be a dependency, to avoid duplication in the dependency tree, unless you know what you are doing. JHipster customizes them; yeoman-test is required by tests.
Unable to resolve path to module is a bug in eslint-plugin-import.
I also noticed that the generator created for me has a small difference from the existing generators in the JHipster GitHub, such as jhipster-dotnetcore, generator-jhipster-quarkus, generator-jhipster-nodejs. Those blueprints are quite old (blueprint support is changing very fast for v8/ESM) and are full server/backend replacements; it seems you are trying to add CAS support, so the use case is quite different.
Does it make any difference in this JHipster version, or is there no problem if I return the same way as jhipster-dotnetcore? Yes: get [INITIALIZING_PRIORITY]() is the new notation, and INITIALIZING_PRIORITY may be >initializing instead of initializing. The explanation is here. JHipster v8 will not support the old notation.
...super._prompting() is used to ask the original prompts; since this is a side-by-side blueprint, the prompts will be duplicated.
[DEP0148] DeprecationWarning: Use of deprecated folder mapping "./lib/util/" is a bug in yeoman-environment and should be fixed in the next version.
CAS Protocol Run-async wrapped function (sync) returned a promise but async() callback must be executed to resolve is shown because you are using an async function together with const done = this.async(); done();.
this.async() is a way to support async through callbacks, from before Promises were a JavaScript default.
There are a few blueprints that use the new notation and can be used as inspiration: native, ionic, jooq and entity-audit.
I didn't see anything about the writing priority, so it looks like you are overriding an existing template and the original generator will write it. For this reason you should inject your configuration into the original generator.
The end result should be something like:
get [INITIALIZING_PRIORITY]() {
  return {
    async initializingTemplateTask() {
      this.info('this blueprint adds support to cas authentication protocol');
    },
  };
}

get [PROMPTING_PRIORITY]() {
  return {
    async promptingTemplateTask() {
      await this.prompt({
        type: 'list',
        name: 'authenticationProtocol',
        message: 'Which authentication protocol do you want to use?',
        choices: [
          {
            name: 'OAuth 2.0 Protocol',
            value: 'oauth2',
          },
          {
            name: 'CAS Protocol',
            value: 'cas',
          },
        ],
        default: 'oauth2',
      }, this.blueprintStorage); // <- `this.blueprintStorage` tells the prompt function to store the configuration inside `.yo-rc.json` at the blueprint namespace.
    },
  };
}

get [CONFIGURING_PRIORITY]() {
  return {
    configuringTemplateTask() {
      // Store the default configuration
      this.blueprintConfig.authenticationProtocol = this.blueprintConfig.authenticationProtocol || 'oauth2';
    },
  };
}

get [LOADING_PRIORITY]() {
  return {
    loadingTemplateTask() {
      // Load the stored configuration; the prompt can be skipped, so this needs to be in another priority.
      this.authenticationProtocol = this.blueprintConfig.authenticationProtocol;
      // Inject the configuration into the original generator. If you are writing the template yourself, this may not be necessary.
      this.options.jhipsterContext.authenticationProtocol = this.blueprintConfig.authenticationProtocol;
    },
  };
}
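With this in place, the chosen value is persisted in the example app's .yo-rc.json under the blueprint's namespace, roughly like the sketch below (the generator-jhipster-mygenerator key is an assumption derived from the blueprint name used earlier, and the generator-jhipster section is trimmed). this.blueprintConfig reads and writes exactly that section:

{
  "generator-jhipster": {
  },
  "generator-jhipster-mygenerator": {
    "authenticationProtocol": "cas"
  }
}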

Netlify functions not found on dev server

I have followed several blogs and questions related to the same problem I am having. It is exactly like this question. However, I am still having issues.
So, I am running netlify dev and trying to access my netlify functions. I have a function in /netlify/functions/ping. The function works as intended when I access the randomized port for the netlify functions (something like localhost:55832...).
However, using the localhost:8888/.netlify/functions/ping gives me a 404 error.
Here is my /netlify/functions/ping file:
import { Handler } from '@netlify/functions';

const handler: Handler = async (event, context) => {
  return {
    statusCode: 200,
    body: JSON.stringify({ data: "pong" }),
  };
};

export { handler };
Here is where I am trying to call my function on a page:
export default function HomePage() {
  useEffect(() => {
    async function pingpong() {
      const res = await fetch(`/.netlify/functions/ping`);
      console.log(res);
    }
    pingpong();
  }, []);
  return (
    ...
I have also tried to alter my netlify.toml with the following:
[[redirects]]
from = "/api/*"
to = "/.netlify/functions/:splat"
status = 200
force = true
What start script do you use in package.json?
Keep in mind that, to enable Netlify Functions locally, you have to use netlify-cli, i.e. the ntl command, to run the local dev server.
No more specific configuration is needed; just follow the docs and use the simple netlify/functions/hello.js example.
Then run ntl dev, and your function will be available at /.netlify/functions/hello. Easy.
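For reference, a minimal sketch of that docs-style function and how to reach it locally (the hello.js name and port 8888 follow Netlify's defaults; adjust if your setup differs):

// netlify/functions/hello.js
exports.handler = async (event, context) => {
  return {
    statusCode: 200,
    body: JSON.stringify({ message: 'Hello World' }),
  };
};

Then run ntl dev and open http://localhost:8888/.netlify/functions/hello, or call fetch('/.netlify/functions/hello') from the app served on port 8888.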

SyntaxError: Unexpected token ':' when running a migration

I have an environment.ts file.
class Environment {
  get databaseName(): string {
    return (
      process.env.MODE === 'dev' ?
        process.env.dataBaseDev :
        process.env.dataBaseProd
    );
  }
}

export const Environments = new Environment();
and also a migrate-mongo-config.js file in my root:
const Environments = require("./src/environment/environment.ts")

const config = {
  mongodb: {
    url: 'mongodbUrl',
    databaseName: Environments.databaseName,
    options: {
      useNewUrlParser: true,
      useUnifiedTopology: true,
    }
  },
  migrationsDir: "migrations",
  migrationFileExtension: ".js"
};

module.exports = config;
When I run my migration I get the error below. I should also add that when I write the database name as a static string it works, but when I want to manage it dynamically I get this error:
/Users/x/Desktop/v/x-backend/src/environment/environment.ts:2
get databaseName(): string {
                  ^
SyntaxError: Unexpected token ':'
How can I solve this problem?
I believe the answer from Vahid Najafi is likely the solution to your problem. Unfortunately his answer is very terse. For those starting to learn TypeScript, please allow me to elaborate on it.
When configuring your environment to use TypeScript, you can enable a separation between TypeScript source files and compiled JavaScript output. The TypeScript compiler reads the source .ts files and writes .js files. It is generally recommended to keep these two collections of files in separate directories to simplify building a deployment. To specify these directories, you can modify your tsconfig.json file to set values for outDir and rootDir.
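For example, a minimal tsconfig.json along those lines might look like the sketch below (the target and module values are assumptions; only rootDir and outDir matter for this discussion):

{
  "compilerOptions": {
    "target": "ES2019",
    "module": "commonjs",
    "rootDir": "./src",
    "outDir": "./dist",
    "esModuleInterop": true
  }
}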
When you compile your .ts files with the tsc command, the TypeScript files with type-specific syntax are converted to plain .js files. Looking at your specific problem, we see that migrate-mongo-config.js refers to ./src/environment/environment.ts. That is a source code file, not the compiled version. At runtime the program expects to require a JavaScript file, not a .ts file. So the corrective action is to modify migrate-mongo-config.js. Assuming you have configured your outDir to be ./dist, the first line of migrate-mongo-config.js should be altered.
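After running tsc with such a configuration, the compiled dist/environment/environment.js (the file the config should require) looks roughly like this, with all type annotations stripped (the exact output depends on your compiler settings):

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Environments = void 0;
class Environment {
    get databaseName() {
        return (process.env.MODE === 'dev' ?
            process.env.dataBaseDev :
            process.env.dataBaseProd);
    }
}
exports.Environments = new Environment();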
Updated version of migrate-mongo-config.js:
const Environments = require("./dist/environment/environment.js")

const config = {
  mongodb: {
    url: 'mongodbUrl',
    databaseName: Environments.databaseName,
    options: {
      useNewUrlParser: true,
      useUnifiedTopology: true,
    }
  },
  migrationsDir: "migrations",
  migrationFileExtension: ".js"
};

module.exports = config;
Notice how the first line's value was changed from ./src/... to ./dist/...? And how the file extension was changed from .ts to .js? Vahid's example removes the extension altogether; I have not tried this, so I am uncertain as to its behavior.
If you find this fixes your issue, please award the points to Vahid Najafi, as this is his original idea. I am merely filling in some of the beginner topics to round out the conversation.
You should require from dist (js):
const Environments = require("./dist/environment/environment")
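In other words, the workflow is: compile first, then run the migration against the compiled output (a sketch, assuming migrate-mongo's standard CLI):

npx tsc
npx migrate-mongo up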

How to setup gulp-watch with gulp-connect livereload?

Most questions and answers on this site do not contain an easy-to-follow general approach to using these two libraries together.
So, being that we use the gulp-connect npm package, and we want to make use of the gulp-watch npm package, how do we set it up so that we can:
watch changes in some files
perform some operation, like building / compiling those files
live-reload the server once the building is done
First, you will define your build task. It can have pre-required tasks or be any kind of task; it doesn't matter.
gulp.task('build', ['your', 'tasks', 'here']);
Then, you will need to activate the connect server. It is important that you are serving the result of the compilation (in this example, the dist directory) and you're enabling livereload with the livereload: true parameter.
const connect = require('gulp-connect');

gulp.task('server', function() {
  return connect.server({
    root: 'dist',
    livereload: true
  });
});
Finally, you will set up your watch logic. Note that we're using watch and not gulp.watch. If you decide to change it, note that their APIs are different and they have different capabilities. This example uses gulp-watch.
const watch = require('gulp-watch');

gulp.task('watch-and-reload', ['build'], function() {
  watch(['src/**'], function() {
    gulp.start('build');
  }).pipe(connect.reload());
});

gulp.task('watch', ['build', 'watch-and-reload', 'server']);
The watch-and-reload task depends on the build task, so at least one build is guaranteed to run.
Then, it will watch for your source files, and in the callback, it will start the build task. This callback gets executed every time that a file is changed in the directory. You could pass an options object to the watch method to be more specific. Check the usage API in their repository.
Also, you will need to start the build action, for which we're using gulp.start. This is not the recommended approach, and will be deprecated eventually, but so far it works. Most questions with these issues in StackOverflow will look for an alternative workaround that changes the approach. (See related questions.)
Notice that gulp.start is called synchronously. This is what you want, since you want to allow the build task to finish before you proceed with the event stream.
And finally, you can use the event stream to reload the page. The event stream will correctly capture what files changed and will reload those.
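Putting the pieces above together, a complete gulpfile sketch for this approach could look like the following (it assumes gulp 3.x, which supports task dependency arrays and gulp.start; the build task body is a placeholder for your own pipeline):

const gulp = require('gulp');
const connect = require('gulp-connect');
const watch = require('gulp-watch');

// placeholder build: copy src/ to dist/ -- replace with your real compilation steps
gulp.task('build', function() {
  return gulp.src('src/**')
    .pipe(gulp.dest('dist'));
});

gulp.task('server', function() {
  return connect.server({
    root: 'dist',
    livereload: true
  });
});

gulp.task('watch-and-reload', ['build'], function() {
  watch(['src/**'], function() {
    gulp.start('build');
  }).pipe(connect.reload());
});

gulp.task('watch', ['build', 'watch-and-reload', 'server']);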
Bringing this up to speed, as per the current stable Gulp release:
The gulp.task API isn't the recommended pattern anymore. Use the exports object to make public tasks.
From the official documentation: https://gulpjs.com/docs/en/api/task#task
To configure watch and livereload you need the following:
gulp.watch
gulp-connect
The watch function is available in the gulp module itself.
Install gulp-connect using npm install --save-dev gulp-connect
To configure the gulp-connect server for livereload, set the livereload property to true.
Run all tasks, followed by a task that calls the watch function, to which globs and a task are given. Any change to files matching the globs triggers the task passed to watch().
The task passed to watch() should signal async completion, otherwise it will not run a second time. In simple words: it should call a callback or return a stream or promise.
Once watch() is configured, append .pipe(connect.reload()) after .pipe(dest(...)) wherever you think the files created by dest should trigger a reload.
Here is a simple working gulpfile.js with connect livereload:
const { src, dest, watch, series, parallel } = require("gulp");
const htmlmin = require("gulp-htmlmin");
const gulpif = require("gulp-if");
const rename = require('gulp-rename');
const uglify = require('gulp-uglify'); // required by the js() task below
const connect = require("gulp-connect");

// environment variable NODE_ENV --> set NODE_ENV=production to minify html and perform anything related to prod
const mode = process.env.NODE_ENV || 'dev';
const outDir = (mode != 'dev') ? 'dist/prod' : 'dist/';

const htmlSources = ['src/*.html'];

function html() {
  return src(htmlSources)
    .pipe(gulpif(
      mode.toLowerCase() != 'dev',
      htmlmin({
        removeComments: true,
        collapseWhitespace: true,
        minifyCSS: true,
        minifyJS: true
      })
    ))
    .pipe(dest(outDir))
    .pipe(connect.reload());
}

function js() {
  return src('src/*.js')
    .pipe(uglify())
    .pipe(rename({ extname: '.min.js' }))
    .pipe(dest(outDir))
    .pipe(connect.reload());
}

function server() {
  return connect.server({
    port: 8000,
    root: outDir,
    livereload: true
  });
}

function watchReload() {
  let tasks = series(html, js);
  watch(["src/**"], tasks);
}

exports.html = html;
exports.js = js;
exports.dev = parallel(html, js, server, watchReload);
Configure the connect server with the livereload property:
function server() {
  return connect.server({
    port: 8000,
    root: outDir,
    livereload: true // essential for live reload
  });
}
Notice .pipe(connect.reload()) in the code above. It is essential that the stream of the required files is piped to connect.reload(); it may not work if you call connect.reload() arbitrarily.
function html() {
  return src(htmlSources)
    .pipe(gulpif(
      mode.toLowerCase() != 'dev',
      htmlmin({
        removeComments: true,
        collapseWhitespace: true,
        minifyCSS: true,
        minifyJS: true
      })
    ))
    .pipe(dest(outDir))
    .pipe(connect.reload()); // Keep it if you want livereload, else discard
}
Since we configured the public task dev, the following command will execute all tasks, including connect and watchReload:
gulp dev
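To try this file locally, the plugins it requires can be installed in one go (a sketch; pin versions as needed):

npm install --save-dev gulp gulp-connect gulp-htmlmin gulp-if gulp-rename gulp-uglify

Set NODE_ENV=production in your shell first if you want the minified production build; otherwise the dev defaults apply.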
