Pubsub between two nodes in IPFS - node.js

I'm trying to send messages between two IPFS nodes.
The daemon that I'm running is based on go-ipfs, and is running with the flag:
ipfs daemon --enable-pubsub-experiment
I've coded two .js files, one is for the subscriber:
const IPFS = require('ipfs')
const topic = 'topic';

const ipfs = new IPFS({
  repo: repo(),
  EXPERIMENTAL: {
    pubsub: true
  },
  config: {
    Addresses: {
      Swarm: [
        '/dns4/ws-star.discovery.libp2p.io/tcp/443/wss/p2p-websocket-star'
      ]
    }
  }
})

ipfs.once('ready', () => ipfs.id((err, info) => {
  if (err) { throw err }
  console.log('IPFS node ready with address ' + info.id)
  subscribeToTopic()
}))

function repo () {
  return 'ipfs-' + Math.random()
}

const receiveMsg = (msg) => {
  console.log(msg.data.toString())
}

const subscribeToTopic = () => {
  ipfs.pubsub.subscribe(topic, receiveMsg, (err) => {
    if (err) {
      return console.error(`failed to subscribe to ${topic}`, err)
    }
    console.log(`subscribed to ${topic}`)
  })
}
And one is for the publisher:
const IPFS = require('ipfs');
const topic = 'topic';
const Buffer = require('buffer').Buffer;
const msg_buffer = Buffer.from('message');

const ipfs = new IPFS({
  repo: repo(),
  EXPERIMENTAL: {
    pubsub: true
  },
  config: {
    Addresses: {
      Swarm: [
        '/dns4/ws-star.discovery.libp2p.io/tcp/443/wss/p2p-websocket-star'
      ]
    }
  }
})

ipfs.once('ready', () => ipfs.id((err, info) => {
  if (err) { throw err }
  console.log('IPFS node ready with address ' + info.id)
  publishToTopic()
}))

function repo () {
  return 'ipfs-' + Math.random()
}

const publishToTopic = () => {
  ipfs.pubsub.publish(topic, msg_buffer, (err) => {
    if (err) {
      return console.error(`failed to publish to ${topic}`, err)
    }
    // msg was broadcasted
    console.log(`published to ${topic}`)
    console.log(msg_buffer.toString())
  })
}
I've run the .js scripts with:
node file.js
But the subscriber didn't receive any message from the publisher, and I don't know why.
What is the correct way to connect two nodes in this case?

Maybe I'm wrong, but the npm package ipfs is an entire implementation of the IPFS protocol, and it creates a node when the constructor is called; that's why ipfs daemon ... is not necessary. If you need to use it as an API with the ipfs daemon, you can use the ipfs-http-client package.
You can also use ipfs-pubsub-room, which has a working example based on this package: ipfs-pubsub-room-demo.
I hope it helps, I'm still learning this tech too.
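For what it's worth, here's a minimal sketch of that daemon-plus-API setup, assuming a local go-ipfs daemon started with --enable-pubsub-experiment on its default API port 5001 (the exact ipfs-http-client call style varies by version, so adjust to yours):

// Sketch: talk to the running go-ipfs daemon over HTTP instead of
// spawning an in-process js-ipfs node.
const ipfsClient = require('ipfs-http-client')

const ipfs = ipfsClient('/ip4/127.0.0.1/tcp/5001')
const topic = 'topic'

async function main () {
  await ipfs.pubsub.subscribe(topic, (msg) => {
    console.log(msg.data.toString())
  })
  await ipfs.pubsub.publish(topic, Buffer.from('message'))
}

main().catch(console.error)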

Currently (2019-09-17) most nodes in the IPFS network don't have pubsub enabled, so the chances that your pubsub messages will get through are slim.
You can try to establish a direct connection between your nodes, as explained here: managing swarm connections in ipfs.
Essentially:
Run "ipfs id" on internet accessible node
Inspect output, get the address (it should look like this /ip4/207.210.95.74/tcp/4001/ipfs/QmesRgiWSBeMh4xbUEHUKTzAfNqihr3fFhmBk4NbLZxXDP
)
On the other node establish direct connection:
ipfs swarm connect /ip4/207.210.95.74/tcp/4001/ipfs/QmesRgiWSBeMh4xbUEHUKTzAfNqihr3fFhmBk4NbLZxXDP
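If your nodes are js-ipfs instances like in the question, the programmatic equivalent is ipfs.swarm.connect. A minimal sketch in the question's callback style, reusing the example address above:

// Sketch: dial the other peer directly from js-ipfs,
// mirroring the `ipfs swarm connect` CLI call above.
const addr = '/ip4/207.210.95.74/tcp/4001/ipfs/QmesRgiWSBeMh4xbUEHUKTzAfNqihr3fFhmBk4NbLZxXDP'

ipfs.swarm.connect(addr, (err) => {
  if (err) { return console.error('failed to connect', err) }
  console.log('connected to ' + addr)
})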

Please see the ipfs GitHub example, as it shows how to connect two js-ipfs browser nodes together via WebRTC.

Related

WebRTC connections don't work all the time

I'm new to WebRTC and I'm using the simple-peer package. I'm working on something like a Discord clone where someone opens a room and other people can then join. Sometimes the connections between them work perfectly fine, sometimes they work from only one side, and sometimes they don't work at all.
This is the peer config I'm using:
import Peer from 'simple-peer' // import added; the snippet assumes it

// `peers`, `store`, `signalPeerData` and `addNewRemoteStream` are defined
// elsewhere in the app.
export const prepareNewPeerConnection = (connUserSocketId, isInitiator) => {
  const localStream = store.getState().room.localStream;

  if (isInitiator) {
    console.log("preparing as initiator");
  } else {
    console.log("preparing not as an initiator");
  }

  peers[connUserSocketId] = new Peer({
    initiator: isInitiator,
    config: {
      iceServers: [{ urls: "stun:stun.l.google.com:19302" }],
    },
    stream: localStream,
  });

  peers[connUserSocketId].on("signal", (data) => {
    const signalData = {
      signal: data,
      connUserSocketId,
    };
    signalPeerData(signalData);
  });

  peers[connUserSocketId].on("stream", (remoteStream) => {
    remoteStream.connUserSocketId = connUserSocketId;
    addNewRemoteStream(remoteStream);
  });
};
I have seen people talking about TURN and STUN servers, which I don't really understand. Could they be the problem here?
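For context: STUN only helps peers discover their public address; when both peers sit behind restrictive (e.g. symmetric) NATs, the media has to be relayed through a TURN server, which would explain connections that only work sometimes or only in one direction. A sketch of what adding a TURN entry to the simple-peer config could look like (turn.example.com and the credentials are placeholders, not a real service):

peers[connUserSocketId] = new Peer({
  initiator: isInitiator,
  config: {
    iceServers: [
      { urls: 'stun:stun.l.google.com:19302' },
      // Hypothetical TURN relay; replace with your own server and credentials.
      {
        urls: 'turn:turn.example.com:3478',
        username: 'user',
        credential: 'pass',
      },
    ],
  },
  stream: localStream,
});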

Bi-directional Websocket with RTK Query

I'm building a web-based remote control application for the music program Ableton Live. The idea is to be able to use a tablet on the same local network as a custom controller.
Ableton Live runs Python scripts, and I use this library that exposes the Ableton Python API to Node. In Node, I'm building an HTTP/Websocket server to serve my React frontend and to handle communication between the Ableton Python API and the frontend running Redux/RTK Query.
Since I both want to send commands from the frontend to Ableton Live, and be able to change something in Ableton Live on my laptop and have the frontend reflect it, I need to keep a bi-directional Websocket communication going. The frontend recreates parts of the Ableton Live UI, so different components will care about/subscribe to different small parts of the whole Ableton Live "state", and will need to be able to update just those parts.
I tried to follow the official RTK Query documentation, but there are a few things I really don't know how best to solve.
RTK Query code:
import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query/react';
import { LiveProject } from '../models/liveModels';

export const remoteScriptsApi = createApi({
  baseQuery: fetchBaseQuery({ baseUrl: 'http://localhost:9001' }),
  endpoints: (builder) => ({
    getLiveState: builder.query<LiveProject, void>({
      query: () => '/completeLiveState',
      async onCacheEntryAdded(arg, { updateCachedData, cacheDataLoaded, cacheEntryRemoved }) {
        const ws = new WebSocket('ws://localhost:9001/ws');
        try {
          await cacheDataLoaded;
          const listener = (event: MessageEvent) => {
            const message = JSON.parse(event.data)
            switch (message.type) {
              case 'trackName':
                updateCachedData(draft => {
                  const track = draft.tracks.find(t => t.trackIndex === message.id);
                  if (track) {
                    track.trackName = message.value;
                    // Components then use selectFromResult to only
                    // rerender on exactly their data being updated
                  }
                })
                break;
              default:
                break;
            }
          }
          ws.addEventListener('message', listener);
        } catch (error) { }
        await cacheEntryRemoved;
        ws.close();
      }
    }),
  })
})
Server code:
import { Ableton } from 'ableton-js';
import { Track } from 'ableton-js/ns/track';
import path from 'path';
import { serveDir } from 'uwebsocket-serve';
import { App, WebSocket } from 'uWebSockets.js';

const ableton = new Ableton();
const decoder = new TextDecoder();
const initialTracks: Track[] = [];

async function buildTrackList(trackArray: Track[]) {
  const tracks = await Promise.all(trackArray.map(async (track) => {
    initialTracks.push(track);
    // A lot more async Ableton data fetching will be going on here
    return {
      trackIndex: track.raw.id,
      trackName: track.raw.name,
    }
  }));
  return tracks;
}

const app = App()
  .get('/completeLiveState', async (res, req) => {
    res.onAborted(() => console.log('TODO: Handle onAborted error.'));
    const trackArray = await ableton.song.get('tracks');
    const tracks = await buildTrackList(trackArray);
    const liveProject = {
      tracks // Will send a lot more than tracks eventually
    }
    res.writeHeader('Content-Type', 'application/json').end(JSON.stringify(liveProject));
  })
  .ws('/ws', {
    open: (ws) => {
      initialTracks.forEach(track => {
        track.addListener('name', (result) => {
          ws.send(JSON.stringify({
            type: 'trackName',
            id: track.raw.id,
            value: result
          }));
        })
      });
    },
    message: async (ws, msg) => {
      const payload = JSON.parse(decoder.decode(msg));
      if (payload.type === 'trackName') {
        // Update track name in Ableton Live and respond
      }
    }
  })
  .get('/*', serveDir(path.resolve(__dirname, '../myCoolProject/build')))
  .listen(9001, (listenSocket) => {
    if (listenSocket) {
      console.log('Listening to port 9001');
    }
  });
I have a timing issue where the server's .ws open handler runs before the buildTrackList function is done fetching all the tracks from Ableton Live. These "listeners" I'm adding in the open handler are callbacks that you can attach to things in Ableton Live, and the one in this example fires whenever the name of a track changes. The first question is whether it's best to try to solve this timing issue on the server side or on the RTK Query side?
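One server-side way to sequence this, sketched under the assumption that ableton-js behaves as in the snippet above: populate initialTracks before the server starts accepting connections (which means taking .listen off the builder chain):

// Sketch: fetch the track list first, then start listening, so that
// initialTracks is already populated when the first socket opens.
async function start() {
  const trackArray = await ableton.song.get('tracks');
  await buildTrackList(trackArray); // fills initialTracks as a side effect

  app.listen(9001, (listenSocket) => {
    if (listenSocket) {
      console.log('Listening to port 9001');
    }
  });
}

start().catch(console.error);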
All the examples I've seen of working with WebSockets in RTK Query are about "streaming updates". But from the beginning I've thought about my scenario as needing bi-directional communication over the same WebSocket connection throughout the whole application. Is this possible with RTK Query, and if so, how do I implement it? Or should I use regular query endpoints for all commands from the frontend to the server?
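One pattern for the second question, as a sketch rather than an official RTK Query recipe (renameTrack is a hypothetical endpoint name): keep the WebSocket in module scope so that onCacheEntryAdded receives on it while a mutation with queryFn sends on the very same connection:

// Sketch: one shared socket; streaming updates read from it, commands write to it.
import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query/react';

const ws = new WebSocket('ws://localhost:9001/ws');

export const remoteScriptsApi = createApi({
  baseQuery: fetchBaseQuery({ baseUrl: 'http://localhost:9001' }),
  endpoints: (builder) => ({
    // ...getLiveState as above, but using the shared `ws` instead of creating its own...
    renameTrack: builder.mutation({
      // queryFn bypasses the HTTP baseQuery and writes to the shared socket.
      queryFn: ({ id, value }) => {
        ws.send(JSON.stringify({ type: 'trackName', id, value }));
        return { data: null };
      },
    }),
  }),
});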

Multiple simultaneous Node.js data requests occasionally resulting in "aborted" error (Express, React)

I have a web application with a React frontend and a Node.js backend (connecting to an MS SQL database).
In the application, on each page load, the frontend sends a few requests (via Axios) to the API backend on the server. Most of the time (95%) they all process flawlessly, but maybe 5% of the time the result is an "Aborted" message and the application server returns a 500 error. Sometimes these requests return very small amounts of data (like a count query with only a few numbers returned, much less than 1 KB), so size isn't the problem.
It seems that somehow the browser is telling the server "oh, actually I need this" and the server cancels its previous results and works on the next request. But most of the time they all get returned.
Here's a sample of the React context:
import React, { useState, useCallback, createContext } from 'react'
import axios from 'axios'
import { useSnackbar } from 'notistack'

export const PlanContext = createContext()

export default function PlanProvider(props) {
  const { enqueueSnackbar } = useSnackbar()
  const [sampleData, setSampleData] = useState([])

  const sampleRequest = useCallback(
    async (dateInput) => {
      try {
        const { data } = await axios.get(`/api/sample`, {
          params: { dateInput: dateInput, },
        })
        setSampleData(data)
      } catch (error) {
        enqueueSnackbar(`Error: ${error.message}`, { variant: 'error' })
      }
    }, [enqueueSnackbar])

  return (
    <PlanContext.Provider
      value={{
        sampleRequest,
        sampleData,
      }}
    >
      {props.children}
    </PlanContext.Provider>
  )
}
And here's a sample of the Node.JS Controller:
const sql = require('mssql')
const config = require('../config/db')

async function sampleRequest(req, res) {
  const { dateInput } = req.query
  let pool
  try {
    pool = await sql.connect(config)
    const { recordset } = await pool.request()
      .input('dateInput', sql.Date, dateInput).query`
        SELECT * FROM DATATABLE WHERE StatusDate = @dateInput
      `
    res.json(recordset)
  } catch (error) {
    console.log('ERROR: ', error.message, new Date())
    res.status(500).json({ message: error.message })
  } finally {
    if (pool) {
      try {
        await pool.close()
      } catch (err) {
        console.error("Error closing connection: ", err);
      }
    }
  }
}

module.exports = {
  sampleRequest
}
And there are multiple contexts and multiple controllers pulling various pieces of data.
Here's an example of the error logged on the Node.js server, and of the failed requests in the browser console (Chrome Developer Tools); screenshots omitted.
Is there something I have mixed up with the async / await setup? I can usually re-create the error after a bit by continually refreshing the page (F5).
For those looking for the answer: I had a similar problem, and I believe it's because you are opening and closing the connection pool on every request or call of your function. Instantiate the connection pool outside of the function, then call pool.request(). Common practice I've seen is to have the connection pool in your db file, export the pool object, and then require it in your routes.
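A sketch of that pattern (file and variable names are illustrative): create the pool once, export the promise, and reuse it in every controller instead of connecting and closing per request:

// db.js: one shared connection pool for the whole app
const sql = require('mssql')
const config = require('../config/db')

const poolPromise = sql.connect(config) // resolves to a ConnectionPool

module.exports = { sql, poolPromise }

// controller.js: reuse the shared pool; don't close it per request
const { sql, poolPromise } = require('./db')

async function sampleRequest(req, res) {
  const { dateInput } = req.query
  try {
    const pool = await poolPromise
    const { recordset } = await pool.request()
      .input('dateInput', sql.Date, dateInput)
      .query('SELECT * FROM DATATABLE WHERE StatusDate = @dateInput')
    res.json(recordset)
  } catch (error) {
    res.status(500).json({ message: error.message })
  }
}

module.exports = { sampleRequest }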

Web3: how to keep a connection to a WebSocket

I'm trying to listen to Transfer events, but it works for a couple of minutes and then the process terminates. I believe that's because of the blockchain node I use, but I'm not sure, and I can't find anything else on it.
How can I keep the connection open and listen to Transfer events 24/7?
const web3 = new Web3(new Web3.providers.WebsocketProvider('wss://bsc-ws-node.nariox.org:443'))

const contract = await new web3.eth.Contract(
  ABI,
  contracts[0]
)

contract.events
  .Transfer({
    fromBlock: 'latest',
    filter: { from: contracts[1] }
  })
  .on('data', async (event: EventData) => {
    const {
      transactionHash,
      returnValues: { value }
    } = event
    ....
  })
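One thing worth trying, as a sketch using the keepalive and reconnect options available in recent web3.js 1.x releases (verify against the version you're running): pass provider options so the WebsocketProvider pings the node and reconnects automatically:

// Sketch: WebsocketProvider with keepalive pings and automatic reconnection.
const provider = new Web3.providers.WebsocketProvider('wss://bsc-ws-node.nariox.org:443', {
  clientConfig: {
    keepalive: true,
    keepaliveInterval: 60000 // ping every 60s so idle connections aren't dropped
  },
  reconnect: {
    auto: true,
    delay: 5000, // ms between attempts
    maxAttempts: 10
  }
})

const web3 = new Web3(provider)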

Socket.io on Sails.js as API and Node+React as frontend

I have an API built using Sails.js and a React/Redux app attached to a Node.js backend, and I am trying to implement socket.io for realtime communication. How does this work? Is it:
1. a socket.io client on the React side that connects to a socket.io server on its Node.js backend, which connects to a socket.io server on the API, or
2. a socket.io client on the React side and on its Node.js backend, both connecting to a socket.io server on the API?
I have tried looking around for some answers, but none seem to meet my requirements.
To try things out, I put the hello endpoint on my API, following the Sails.js realtime documentation, but when I do a sails lift I get this error: Could not fetch session, since connecting socket has no cookie (is this a cross-origin socket?). I figure that I need to pass an auth code inside the request headers' Authorization property.
Assuming I went for option #1, and using redux-socket.io:
In my Redux middleware I created a socketMiddleware:
import createSocketIoMiddleware from 'redux-socket.io'
import io from 'socket.io-client'
import config from '../../../config'

const socket = io(config.host)

export default function socketMiddleware() {
  return createSocketIoMiddleware(
    socket,
    () => next => (action) => {
      const { nextAction, shuttle, ...rest } = action
      if (!shuttle) {
        return next(action)
      }
      const { socket_url: shuttleUrl = '' } = config
      const apiParams = {
        data: shuttle,
        shuttleUrl,
      }
      const nextParams = {
        ...rest,
        promise: api => api.post(apiParams),
        nextAction,
      }
      return next(nextParams)
    },
  )
}
And in my Redux store:
import { createStore, applyMiddleware, compose } from 'redux'
import createSocketIoMiddleware from 'redux-socket.io'
...
import rootReducers from '../reducer'
import socketMiddleware from '../middleware/socketMiddleware'
import promiseMiddleware from '../middleware/promiseMiddleware'
...
import config from '../../../config'

export default function configStore(initialState) {
  const socket = socketMiddleware()
  ...
  const promise = promiseMiddleware(new ApiCall())
  const middleware = [
    applyMiddleware(socket),
    ...
    applyMiddleware(promise),
  ]
  if (config.env !== 'production') {
    middleware.push(DevTools.instrument())
  }
  const createStoreWithMiddleware = compose(...middleware)
  const store = createStoreWithMiddleware(createStore)(rootReducers, initialState)
  ...
  return store
}
In my promiseMiddleware:
export default function promiseMiddleware(api) {
  return () => next => (action) => {
    const { nextAction, promise, type, ...rest } = action
    if (!promise) {
      return next(action)
    }
    const [REQUEST, SUCCESS, FAILURE] = type
    next({ ...rest, type: REQUEST })

    function success(res) {
      next({ ...rest, payload: res, type: SUCCESS })
      if (nextAction) {
        nextAction(res)
      }
    }

    function error(err) {
      next({ ...rest, payload: err, type: FAILURE })
      if (nextAction) {
        nextAction({}, err)
      }
    }

    return promise(api)
      .then(success, error)
      .catch((err) => {
        console.error('ERROR ON THE MIDDLEWARE: ', REQUEST, err) // eslint-disable-line no-console
        next({ ...rest, payload: err, type: FAILURE })
      })
  }
}
My ApiCall:
/* eslint-disable camelcase */
import superagent from 'superagent'
...

const methods = ['get', 'post', 'put', 'patch', 'del']

export default class ApiCall {
  constructor() {
    methods.forEach(method =>
      this[method] = ({ params, data, shuttleUrl, savePath, mediaType, files } = {}) =>
        new Promise((resolve, reject) => {
          const request = superagent[method](shuttleUrl)
          if (params) {
            request.query(params)
          }
          ...
          if (data) {
            request.send(data)
          }
          request.end((err, { body } = {}) => err ? reject(body || err) : resolve(body))
        },
      ))
  }
}
All of this wiring between the middlewares and the store works well for regular HTTP API calls. My question is: am I on the right path? If I am, what should I write on the React/Node.js server part to communicate with the API socket? Should I also use socket.io-client?
You need to add sails.io.js on your Node server. Sails' socket behavior is quite tricky, since it doesn't use the on method to listen for events.
Create a Sails endpoint which handles the socket request; the documentation is here. The documentation is such a pain in the ass, but please bear with it.
On your Node server, you can use it like:
import socketIOClient from 'socket.io-client'
import sailsIOClient from 'sails.io.js'

const ioClient = sailsIOClient(socketIOClient)
ioClient.sails.url = "YOUR SOCKET SERVER URL"

ioClient.socket.get("SAILS ENDPOINT WHICH HANDLE SOCKET", function (data) {
  console.log('Socket Data', data);
})
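On the cookie/Authorization error from the question: sails.io.js can also send default headers with every virtual request (check the docs for the version you're on), e.g.:

// Sketch: default headers for all sails.io.js virtual requests.
ioClient.sails.headers = {
  Authorization: 'Bearer YOUR_TOKEN' // placeholder token
}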
