Including runfiles in a filegroup - node.js

I'm new to Bazel.
I thought I'd start by trying to build a simple Node.js project. It uses Babel to do some transforming as part of the build process, and the issue I'm having is that I can't seem to find a way to get these transformed files into a filegroup.
Here's my BUILD file.
load("#build_bazel_rules_nodejs//:defs.bzl", "nodejs_binary")
load("#bazel_tools//tools/build_defs/pkg:pkg.bzl", "pkg_tar")
# Group all our initial code.
filegroup(
name = "src",
srcs = [
".babelrc",
"package.json",
"//config:src",
"//handlers:src",
"//migrations:src",
"//models:src",
"//services:src",
"//tasks:src",
"#dependencies//:node_modules",
],
)
# Group all our generated code.
filegroup(
name = "out",
srcs = [
"//:babel:runfiles" ### ???
],
)
nodejs_binary(
name = "babel",
entry_point = "babel-cli/bin/babel.js",
templated_args = [
".",
"--ignore node_modules,test/,migrations/,babel_bin_loader.js",
"-d out",
"--source-maps=both",
"--copy-files",
],
node_modules = "#nodejs_build_tools//:node_modules",
data = [
"//:src",
]
)
pkg_tar(
name = "build",
strip_prefix = "/",
package_dir = "/usr/src/app",
srcs = ["//:out"],
mode = "0755",
)
My issue is that I'm not sure how to reference the runfiles from my nodejs_binary rule.
https://github.com/bazelbuild/rules_nodejs/blob/master/internal/node/node.bzl#L130
seems to indicate that there should be a :runfiles attribute or similar?
Thanks! :)

So it turns out that the correct way to do this appears to be to use a genrule to actually call the configured nodejs binary, e.g.:
## Artifact Construction ##
genrule(
    name = "construct_artifact",
    outs = ["artifact.tar"],
    cmd = """./$(location babel) . --ignore bazel-out,node_modules,test/,migrations/ -d out/ --source-maps=both --copy-files && tar cvf $@ out/""",
    srcs = [
        "//:src",
    ],
    tools = [
        "//:babel",
    ],
)
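With that genrule in place, the pkg_tar target no longer needs the //:out filegroup. One option (an assumption on my part, not part of the original answer) is to merge the generated tarball into the final archive via pkg_tar's deps attribute, which unpacks and repacks existing tar files:

pkg_tar(
    name = "build",
    package_dir = "/usr/src/app",
    # deps takes existing tar files and merges their contents into this
    # archive, which suits the artifact.tar produced by :construct_artifact.
    deps = [":construct_artifact"],
    mode = "0755",
)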

How to change `DIRS` path dynamically in Django?

I want to change DIRS dynamically, based on the device:
if request.user_agent.is_pc:
    request.template_prefix = 'desktop'
else:
    request.template_prefix = 'mobile'
Default (settings.py):
TEMPLATES = [
    {
        'DIRS': ['templates'],
    },
]
I want to change my DIRS path like this (settings.py):
TEMPLATES = [
    {
        'DIRS': [f"templates/{request.template_prefix}"],
    },
]
Also let me know if you need more code.
Note: I can't use user_agent in settings.py because it requires a request; that's why I'm asking.
My Django version is 3.2.x.
In simple words: how do I change the DIRS path from views.py?
Thanks!
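For what it's worth, a common workaround (a minimal sketch, not from the original question) is to leave DIRS alone and pick the template subdirectory per request in the view, using the template_prefix set above; templates/desktop/home.html and templates/mobile/home.html are hypothetical paths:

from django.shortcuts import render

def home(request):
    # request.template_prefix is assumed to have been set to 'desktop' or
    # 'mobile' earlier in the request, as in the snippet above.
    # "home.html" is just a placeholder template name.
    return render(request, f"{request.template_prefix}/home.html")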

Issues extracting a satellite image using sentinelsat

I am using the code below to extract a satellite image using sentinelsat:
from sentinelsat import SentinelAPI, read_geojson, geojson_to_wkt
from datetime import date
api = SentinelAPI('userid', 'password', 'https://scihub.copernicus.eu/dhus')
footprint = geojson_to_wkt(read_geojson('./map.geojson'))
products = api.query(footprint,
date = ('20180101', '20191010'),
platformname = 'Sentinel-2',
processinglevel = 'Level-2A',
cloudcoverpercentage = (0,3)
)
products_gdf = api.to_geodataframe(products)
products_gdf_sorted = products_gdf.sort_values(['cloudcoverpercentage'], ascending=[True])
products_gdf_sorted.index
api.download('fed1003b-effa-41f5-9079-5d017af0eea2')
I have ensured that my geojson is pointing to the correct position.
{
  "type": "Polygon",
  "coordinates": [
    [
      [80.265872, 13.064500],
      [80.265526, 13.064076],
      [80.266435, 13.064190],
      [80.265872, 13.064500]
    ]
  ]
}
I am expecting the building to be part of my TIFF file, but I am just getting a green patch as the output.
I am using the following code to create the TIFF file:
import rasterio as rio
R10 = r'.\S2A_MSIL2A_20190511T045701_N0212_R119_T44PMV_20190511T104102.SAFE\GRANULE\L2A_T44PMV_A020279_20190511T050605\IMG_DATA\R10m'
b4 = rio.open(R10 + r'\T44PMV_20190511T045701_B04_10m.jp2', driver='JP2OpenJPEG')
b3 = rio.open(R10 + r'\T44PMV_20190511T045701_B03_10m.jp2', driver='JP2OpenJPEG')
b2 = rio.open(R10 + r'\T44PMV_20190511T045701_B02_10m.jp2', driver='JP2OpenJPEG')
with rio.open('RGB.tiff', 'w', driver='Gtiff', width=b4.width, height=b4.height,
              count=3, crs=b4.crs, transform=b4.transform, dtype=b4.dtypes[0]) as rgb:
    rgb.write(b2.read(1), 1)
    rgb.write(b3.read(1), 2)
    rgb.write(b4.read(1), 3)
Where am I going wrong? Am I missing a parameter while doing the extraction, or is my expectation wrong? Kindly clarify. Thanks in advance.
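For reference, here is a minimal sketch of clipping the written GeoTIFF to the GeoJSON footprint with rasterio, assuming the JP2 bands are in a UTM CRS and map.geojson holds the bare Polygon shown above. Note that the footprint is only on the order of 100 m across, so at 10 m resolution the clipped image will be just a handful of pixels:

import json

import rasterio as rio
from rasterio.mask import mask
from rasterio.warp import transform_geom

# map.geojson contains a bare Polygon geometry, so json.load() returns the
# geometry dict directly.
with open('./map.geojson') as f:
    geom = json.load(f)

with rio.open('RGB.tiff') as src:
    # The Sentinel-2 bands are in a UTM CRS while the GeoJSON is lon/lat
    # (EPSG:4326), so reproject the polygon before masking.
    geom_in_src_crs = transform_geom('EPSG:4326', src.crs, geom)
    clipped, clipped_transform = mask(src, [geom_in_src_crs], crop=True)
    profile = src.profile
    profile.update(height=clipped.shape[1], width=clipped.shape[2],
                   transform=clipped_transform)

with rio.open('RGB_clipped.tiff', 'w', **profile) as dst:
    dst.write(clipped)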

How can I install extensions for vscode?

This is a beginner question. So there is a package vscode-with-extensions.
The package says:
A set of vscode extensions to be installed alongside the editor. Here's an example:
vscode-with-extensions.override {
  # When the extension is already available in the default extensions set.
  vscodeExtensions = with vscode-extensions; [
    bbenoist.Nix
  ]
  # Concise version from the vscode market place when not available in the default set.
  ++ vscode-utils.extensionsFromVscodeMarketplace [
    {
      name = "code-runner";
      publisher = "formulahendry";
      version = "0.6.33";
      sha256 = "166ia73vrcl5c9hm4q1a73qdn56m0jc7flfsk5p5q41na9f10lb0";
    }
  ];
}
Where in configuration.nix do I have to put this expression? I already have
environment.systemPackages = with pkgs; [
  wget
  vim
  vscode-with-extensions
];
therein.
You're supposed to use it in configuration.nix directly, like for instance:
environment.systemPackages = with pkgs; [
  wget
  vim
  (vscode-with-extensions.override {
    # When the extension is already available in the default extensions set.
    vscodeExtensions = with vscode-extensions; [
      bbenoist.Nix
    ]
    # Concise version from the vscode market place when not available in the default set.
    ++ vscode-utils.extensionsFromVscodeMarketplace [
      {
        name = "code-runner";
        publisher = "formulahendry";
        version = "0.6.33";
        sha256 = "166ia73vrcl5c9hm4q1a73qdn56m0jc7flfsk5p5q41na9f10lb0";
      }
    ];
  })
];
Or, in a more readable version:
environment.systemPackages = with pkgs;
  let
    vscodeWithExtensions = vscode-with-extensions.override {
      # When the extension is already available in the default extensions set.
      vscodeExtensions = with vscode-extensions; [
        bbenoist.Nix
      ]
      # Concise version from the vscode market place when not available in the default set.
      ++ vscode-utils.extensionsFromVscodeMarketplace [
        {
          name = "code-runner";
          publisher = "formulahendry";
          version = "0.6.33";
          sha256 = "166ia73vrcl5c9hm4q1a73qdn56m0jc7flfsk5p5q41na9f10lb0";
        }
      ];
    };
  in
  [
    wget
    vim
    vscodeWithExtensions
  ];
So, apparently it can go directly into environment.systemPackages, but requires parentheses:
environment.systemPackages = with pkgs; [
  wget
  vim
  (vscode-with-extensions.override {
    vscodeExtensions = with vscode-extensions; [
      bbenoist.Nix
    ];
  })
];
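Once configuration.nix has been updated, the change is applied with the usual rebuild (this assumes NixOS rather than standalone Nix):

# Apply the new system configuration, including the overridden vscode package.
sudo nixos-rebuild switch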

node-gyp: run binding.gyp in all subdirectories

I'm developing a big node.js project which also includes several native libraries.
To use these libraries in JavaScript I'm compiling them to node addons (.node) using node-gyp.
I'd like to run node-gyp once from the root directory to compile all the available binding.gyp files recursively (in all the subdirectories).
Is there any way to do that?
GYP allows you to set a list of dependencies for a target. You can create a target of type none in the top-level binding.gyp and list the dependencies from subdirectories there:
{
  'targets': [
    {
      'target_name': 'build_all',
      'type': 'none',
      'dependencies': ['subdir1/binding.gyp:*', 'subdir/subdir2/binding.gyp:*'],
      # or generate the dependencies list with a command expansion
      'dependencies': ['<!@(find -mindepth 2 -name binding.gyp | sed -e s/$/:*/)'],
    }
  ]
}
This will compile all the dependencies and put them into the build/ directory in the root.
To put each addon into its corresponding directory, add a post-build target inside the addon's binding.gyp:
{
  "targets": [
    {
      "target_name": "my-target",
      "sources": [ "example.cpp" ]
    },
    {
      "target_name": "action_after_build",
      "type": "none",
      "dependencies": [ "my-target" ],
      "copies": [
        {
          "files": [ "<(PRODUCT_DIR)/my-target.node" ],
          "destination": "."
        }
      ]
    }
  ]
}
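With a top-level binding.gyp like the first snippet, a single rebuild from the project root should then build every listed sub-addon into build/ (assuming node-gyp is available on the PATH):

# Run from the directory containing the top-level binding.gyp.
node-gyp rebuild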
I didn't find any option to do this with just node-gyp, but one possible solution is to do it in a script.
For example, add the following to the package.json in the root folder:
"scripts": {
"install": "find ./app/* -name binding.gyp -execdir node-gyp rebuild ;"
}
This will cause all the native addons to compile when running npm install in the root folder.
An alternative to the other answers which seems to work so far (without ever having to update binding.gyp):
{
  "targets": [
    {
      "target_name": "addon",
      "sources": [
        "<!@(node -p \"var fs=require('fs'),path=require('path'),walk=function(r){let t,e=[],n=null;try{t=fs.readdirSync(r)}catch(r){n=r.toString()}if(n)return n;var a=0;return function n(){var i=t[a++];if(!i)return e;let u=path.resolve(r,i);i=r+'/'+i;let c=fs.statSync(u);if(c&&c.isDirectory()){let r=walk(i);return e=e.concat(r),n()}return e.push(i),n()}()};walk('./sources').join(' ');\")"
      ]
    }
  ]
}
(from https://stackoverflow.com/a/60947528/2016831)
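For readability, here is a rough un-minified sketch of what that inlined walk function does: it recursively collects every file under ./sources and joins the paths with spaces so GYP can splice them into sources (error handling trimmed):

// Rough readable equivalent of the one-liner above.
const fs = require('fs');
const path = require('path');

function walk(dir) {
  let results = [];
  for (const entry of fs.readdirSync(dir)) {
    const full = path.resolve(dir, entry);
    const rel = dir + '/' + entry;
    if (fs.statSync(full).isDirectory()) {
      results = results.concat(walk(rel)); // recurse into subdirectories
    } else {
      results.push(rel);                   // keep files as dir/name paths
    }
  }
  return results;
}

console.log(walk('./sources').join(' '));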

Migrating from WAF to GYP - trouble including library

I'm migrating an out-of-date npm package from WAF to GYP, but I'm having a few problems getting everything working. It runs a wscript which seems to include a third-party library:
import Options
from os import unlink, symlink, popen, sys
from os.path import exists

srcdir = '.'
blddir = 'build'
VERSION = '0.0.2'

def set_options(opt):
    opt.tool_options('compiler_cxx')

def configure(conf):
    conf.check_tool('compiler_cxx')
    conf.check_tool('node_addon')
    print(sys.platform)
    if sys.platform == 'darwin':
        conf.check_tool('osx')
        tc_framework = 'TelldusCore'
        conf.env.append_value("FRAMEWORK_TC", tc_framework)
        tc_frameworkpath = '/Library/Frameworks/TelldusCore.framework/'
        conf.env.append_value("FRAMEWORKPATH_TC", tc_frameworkpath)
        tc_lib = tc_frameworkpath + 'Headers/'
        conf.env.append_value("CPPPATH_TC", tc_lib)
    elif sys.platform == 'linux2':
        conf.env.LIB_TC = 'telldus-core'
        #conf.env.LIBPATH_TC = ['/usr/lib']
        #conf.env.CCFLAGS_TC = ['-O0']
        conf.env.CCDEFINES_TC = ['TC']
        #conf.env.LINKFLAGS_TC = ['-g']
    else:
        raise ValueError("Does not support: %r" % sys.platform)

def build(bld):
    obj = bld.new_task_gen('cxx', 'shlib', 'node_addon')
    obj.target = 'telldus'
    obj.source = 'telldus.cc'
    obj.uselib = "TC"
Now I've tried to convert it to a binding.gyp file, but I'm not sure how to include the library:
{
  "targets": [
    {
      "target_name": "tellduscorejs2",
      "sources": [ "tellduscorejs2.cpp" ],
      "conditions": [
        ['OS=="mac"', {
          'defines': [
            'FRAMEWORK_TC=TelldusCore',
            'FRAMEWORKPATH_TC="/Library/Frameworks/TelldusCore.framework/"',
            'CPPPATH_TC="/Library/Frameworks/TelldusCore.framework/Headers/"'
          ]
        }],
        ['OS=="linux"', {
          'defines': [
            'LIB_TC=telldus-core',
            'CCDEFINES_TC=TC'
          ]
        }]
      ],
      'link_settings': {
        'libraries': [
          ???
        ],
      },
    }
  ]
}
If anyone could point out whether I'm on the right lines, or what I need to change to include the library, it'd be appreciated!
I've actually done this for the telldus-core project. See https://github.com/marchaos/telldus-core-js/
I've also added events for devices and sensors.
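For reference, a sketch of how the linkage might look in binding.gyp, based on the wscript above rather than on that repo: include the framework headers and link against TelldusCore on macOS, and link against libtelldus-core on Linux.

{
  "targets": [
    {
      "target_name": "tellduscorejs2",
      "sources": [ "tellduscorejs2.cpp" ],
      "conditions": [
        ['OS=="mac"', {
          # Assumes TelldusCore.framework is installed under /Library/Frameworks.
          "include_dirs": [ "/Library/Frameworks/TelldusCore.framework/Headers" ],
          "link_settings": {
            "libraries": [ "-framework TelldusCore" ]
          }
        }],
        ['OS=="linux"', {
          # Mirrors CCDEFINES_TC = ['TC'] and LIB_TC = 'telldus-core' in the wscript.
          "defines": [ "TC" ],
          "link_settings": {
            "libraries": [ "-ltelldus-core" ]
          }
        }]
      ]
    }
  ]
}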
