In an Express-based app, I'm running an async function to fetch image sources from a database and render them in a Nunjucks view.
Nunjucks is rendering the following in the DOM
<img src="[object Promise]">
I have already enabled Nunjucks's async rendering with web: { async: true } and I'm using the async render API with a callback, like so:
// controller.js (KeystoneJS app)
view.render('index', function (err, res) {
console.log('err at index render', err); // undefined
return res;
});
How can I get the resolved value of my async function?
As I understand it, Nunjucks doesn't support asynchronous rendering directly; you can use asynchronous filters to get it. Maybe I'm wrong.
IMHO, using a feature that the docs mark with a "Be careful!" warning is not a great idea, but here is how it works:
// template.njk
Hello {{user_id | find | attr('name') }}!
// app.js
var nunjucks = require('nunjucks');
var env = nunjucks.configure();
// Async filter: the trailing `true` marks it as asynchronous,
// and the result is delivered through the callback
env.addFilter('find', function(id, cb) {
  // simulate an async lookup (e.g. a database query)
  setTimeout(function () {
    cb(null, {
      name: 'Smith'
    });
  }, 10);
}, true);
// Sync filter
env.addFilter('attr', function(obj, attr) {
  return obj && attr && obj[attr];
});
env.render('template.njk',
  { user_id: 1 }, // pass sync vars
  function(err, res) {
    if (err) {
      console.error(err);
      return;
    }
    console.log(res); // "Hello Smith!"
    return res;
  }
);
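Applied to the image case from the question, the same pattern might look roughly like the sketch below; the imageSrc filter name and the Image.findById lookup are assumptions for illustration, not anything from the question's code:
// app.js (sketch)
env.addFilter('imageSrc', function(id, cb) {
  // any promise-returning lookup works here; Image.findById is just a placeholder
  Image.findById(id)
    .then(function(image) { cb(null, image.src); })
    .catch(cb);
}, true);
// template.njk (sketch)
<img src="{{ image_id | imageSrc }}">
As with the example above, the template then has to be rendered through the callback form of env.render, because async filters don't work with the synchronous API.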
I don't know about nunjucks, but you can implement async functionality regardless of the view engine being used. To show the idea, I tried to reproduce your situation. I created an HTML file named index.html with an img tag without any src attribute:
<html>
<head>
<meta charset="utf-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1.0"/>
<meta http-equiv="X-UA-Compatible" content="IE=edge"/>
<title>Reproduce</title>
</head>
<body>
<img id='bg'>
<script src='./so.js'></script>
</body>
</html>
I have a <script> tag in my HTML which links to my so.js file, shown below, which requests an image from the NodeJS/Express server:
getImage();
function getImage(){
// Get an image by its name as URL parameter
fetch('/bg/background.jpg',{
method: 'GET',
headers: {
'Content-Type': 'application/json'
}
}).then(result=>{
return result.blob()
}).then(result=>{
console.log('result -> ', result)
document.querySelector('#bg').src=URL.createObjectURL(result)
document.querySelector('#bg').style.width='200px'
}).catch(err=>{
console.log('err -> ', err)
})
}
Here is my NodeJS/ExpressJS code inside a file named server.js:
const express = require('express')
const bodyParser = require('body-parser')
const path = require('path')
const fetch = require('node-fetch') // not actually used below
const server = express()
//Body-parser middleware
server.use(bodyParser.json())
server.use(bodyParser.urlencoded({extended:false}))
server.use(bodyParser.raw())
//Set static path
server.use(express.static(path.join(__dirname,'public')))
server.get('/',(req,res)=>{
// express.static above already serves public/index.html for "/",
// so send the file directly instead of res.render (no view engine is configured)
res.sendFile(path.join(__dirname,'public','index.html'))
})
// Pick a file on hard disk
// and send it as "Blob" to the browser
server.get('/bg/:name',(req,res,next)=>{
var options = {
root: __dirname + '/public/',
dotfiles: 'deny',
headers: {
'x-timestamp': Date.now(),
'x-sent': true
}
};
var fileName = req.params.name;
res.sendFile(fileName, options, function (err) {
if (err) {
next(err);
} else {
console.log('Sent:', fileName);
}
});
})
server.listen('8000',()=>{
console.log('Server listening on port 8000...')
})
As you can see, I'm doing the async communication between the browser and the server with the fetch API, without even touching the view engine.
I just wanted to offer an alternative idea: use the fetch API for the async HTTP communication, regardless of which view rendering engine is being used.
My shopify app proxy information:
Subpath prefix: apps
Subpath: rma
Host: https://www.example.com/shopdevc/shopifyAPI/
If I query the host directly using https://www.example.com/shopdevc/shopifyAPI/apps/rma, it works great.
But, in my React function, querying "/apps/rma" returns
<!DOCTYPE html>
<html lang="en">
  <head>
    <script type="module" src="/@vite/client"></script>
    <script type="module">
      import RefreshRuntime from "/@react-refresh"
      RefreshRuntime.injectIntoGlobalHook(window)
      window.$RefreshReg$ = () => {}
      window.$RefreshSig$ = () => (type) => type
      window.__vite_plugin_react_preamble_installed__ = true
    </script>
    <meta charset="UTF-8" />
  </head>
  <body>
    <div id="app"><!--app-html--></div>
    <script type="module" src="/src/entry-client.jsx"></script>
  </body>
</html>
Is there something wrong with my proxy url or path? My folder structure is
"c:\wwwroot\shopDevC\shopifyAPI\apps\rma\index.aspx"
where index.aspx is the default document.
My code:
import { useState, useEffect } from "react";

function delayed_render(async_fun, deps = []) {
  const [output, setOutput] = useState();
  useEffect(async () => setOutput(await async_fun()), deps);
  return (output === undefined) ? null : output;
}

function TestAPI() {
  return delayed_render(async () => {
    // const resp = await fetch(`https://www.example.com/shopdevc/shopifyAPI/apps/rma`); // this works fine
    const resp = await fetch(`/apps/rma`); // this does not work even though our proxy is set to https://www.example.com/shopdevc/shopifyAPI
    const data = await resp.text();
    return <div>
      <h1> Fetch data from an api in react: {data} </h1>
    </div>;
  });
}
I too have been working on a similar problem today; almost my exact search phrase: "Shopify proxy url not working but unproxied is working". This is not an answer to your problem, but it may help you, and perhaps others will find it useful. I think it's the URL resolution, not your React code.
My skeleton was built with the shopify-cli: shopify create app node.
What I saw in my browser developer window when loading https://{shop}.myshopify.com/{proxy_prefix}/{proxy_path}/{rest-of-path} was that assets were requested from the root (src="/src/entry-client.jsx" etc.), which tried to load https://{shop}.myshopify.com/src/entry-client. My solution was to replace the root with the full URL of my host in the server file server/index.js. The following works for the compiled files in dist (isProd === true):
app.use("/*", (req, res, next) => {
let host = "";
let contentType = "text/html";
if (Object.keys(req.headers).includes("x-forwarded-for")) {
host = process.env.HOST; // where I actually am
contentType = "application/liquid"; // to use shop theme wrapper
};
let template = fs.readFileSync(
path.resolve(root, 'dist/client/index.html'), // no leading slash, or path.resolve ignores `root`
'utf-8'
);
template = template.replace("/assets", `${host}/assets`); // correct url
res
.status(200)
.set("Content-Type", contentType)
.send(template);
});
For the development server (the !isProd section) more substitutions are required:
for (const part of ["/@", "/src"]) {
  template = template.replace(part, `${host}${part}`)
}
And then, to allow the string substitutions, the vite.createServer instance needs middlewareMode: "ssr" (server-side rendering), which meant the react-refresh code was no longer injected. So I included this replacement (the vite.createServer setup itself is sketched after the snippet):
template = template.replace("<!-- react-refresh -->", `
<script type="module">
import RefreshRuntime from "${host}/@react-refresh"
RefreshRuntime.injectIntoGlobalHook(window)
window.$RefreshReg$ = () => {}
window.$RefreshSig$ = () => (type) => type
window.__vite_plugin_react_preamble_installed__ = true
</script>
`);
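For completeness, this is roughly the vite.createServer setup the substitutions above rely on; a sketch under my assumptions, where root and app come from the surrounding server/index.js:
// server/index.js (sketch, development branch only)
const vite = await (await import("vite")).createServer({
  root,
  server: { middlewareMode: "ssr" }, // run vite as middleware so we can read and rewrite the template ourselves
});
app.use(vite.middlewares);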
On a final note - I'm still trying to figure out how to make the socket __vite_ping go to my HOST.
Good luck. Ngā mihi nui.
I simply bypassed using the shopify proxy and rolled my own in index.js:
// `fetch` here comes from node-fetch (or the built-in fetch on Node 18+)
app.use(`/a/prx/*`, async function(req, res) {
const path = req.originalUrl.split(`/`).splice(-1).toString();
var options = {
method: req.method,
headers: {
'User-Agent': req.headers[`user-agent`],
"X-User-Agent": req.headers[`user-agent`],
"Shopify-API-Key": process.env.SHOPIFY_API_KEY,
"Shopify-API-Secret": process.env.SHOPIFY_API_SECRET
}
};
const response = await fetch(`${process.env.KP_SHOPIFY_API}${path}/`, options);
const data = await response.text();
res.send(data);
});
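With that route in place, the React code can fetch against the app's own origin instead of the Shopify proxy path. A usage sketch (the rma segment just mirrors the question's endpoint):
// client side (sketch)
async function loadRma() {
  // `/a/prx/rma` is forwarded to `${process.env.KP_SHOPIFY_API}rma/` by the route above
  const resp = await fetch('/a/prx/rma');
  return resp.text();
}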
I'm trying to refresh a client's webpage (using a router), and everywhere I look I see something along the lines of res.redirect(some link to the same page); however, for some reason this isn't working for me. Do you know of any alternatives?
My code looks something like this:
router.post('/sendSnippet', function (req, res) {
req.on('data', function(data) {
User.findOne({email: req.user.email}).then((userToEdit) =>{
if(userToEdit){
var newSnippet = {
"types":[],
"code": data.toString()
}
userToEdit.snippets.push(newSnippet)
userToEdit.save().then(()=>{
//refresh here
res.redirect('/profile/');
})
}
})
})
});
Thanks in advance for any help.
Assuming you are trying to force-reload a client's browser tab for your website, I don't think you can do that server-side.
You can use meta http-equiv or the Refresh HTTP header to tell the client's browser to refresh the page after some time, or use client-side JavaScript to refresh the page:
Router:
router.post("/example", (req, res) => {
res.header("Refresh", "10"); // tells the browser to refresh the page after 10 seconds
res.send("your data");
});
Meta:
<head>
<!-- Refresh after 10 seconds -->
<meta http-equiv="refresh" content="10">
</head>
Javascript + html:
<html>
<body>
<script>
// reloads after 10 seconds
setTimeout(() => {
location.reload();
}, 10000);
// or you could have some kind of API to tell when to refresh the page
function check() {
const x = new XMLHttpRequest();
x.open("GET", "some path");
x.send();
x.onload = function() {
if (x.response === "done") {
location.reload();
} else {
setTimeout(check, 1000);
}
}
}
check();
</script>
</body>
</html>
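For completeness, a sketch of the server side that the check() polling above assumes; the /refresh-status path and the 'done' flag are hypothetical, not anything Express provides:
// server side (sketch)
let workFinished = false; // flip this when the long-running work completes
router.get('/refresh-status', (req, res) => {
  res.send(workFinished ? 'done' : 'pending');
});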
Server Sent Events are a valuable tool to open a persistent connection to a web server, where the server has the ability to push new data to the client, when available.
Using this technology in Node.js is quite straightforward and can be implemented with the following code example:
#!/usr/bin/env node
'use strict';
const http = (options, listener) => require('http').createServer(listener).listen(options.port);
http({ port: 8080 }, (req, res) => {
switch (req.url) {
case '/server-sent-events': {
res.writeHead(200, {
'Content-Type': 'text/event-stream',
'Connection': 'keep-alive',
'Cache-Control': 'no-cache',
});
const sendDate = () => res.write(`data: ${new Date()}\n\n`);
sendDate();
const interval = setInterval(sendDate, 1000);
req.on('close', () => clearInterval(interval));
} break;
default: {
res.writeHead(200, {
'Content-Type': 'text/html; charset=utf-8',
});
res.end(`
<!DOCTYPE html>
<html>
<head>
<title>Server Send Events</title>
<meta charset="utf-8">
<script>
const sse = new EventSource('/server-sent-events');
sse.onerror = () => document.body.innerHTML = 'Connection Error';
sse.onmessage = ({ data }) => document.body.innerHTML = data;
</script>
</head>
<body></body>
</html>
`);
}
}
});
Unfortunately I am not able to achieve the same goal with Deno, as there is no simple write method on the request object, but I guess it has to be implemented somehow using the req.w buffer. Can you please help me finish off the following example code, so that Server Sent Events can be used with Deno as well?
#!/usr/bin/env deno run --allow-net
import { listenAndServe as http } from 'https://deno.land/std/http/server.ts';
http({ port: 8080 }, (req) => {
switch (req.url) {
case '/server-sent-events': {
// missing steps:
// * setup the server sent event headers
// * create the interval and send the date periodically
// * clear the interval when the connection gets closed
} break;
default: {
req.respond({
headers: new Headers({
'Content-Type': 'text/html; charset=utf-8',
}),
body: `
<!DOCTYPE html>
<html>
<head>
<title>Server Send Events</title>
<meta charset="utf-8">
<script>
const sse = new EventSource('/server-sent-events');
sse.onerror = () => document.body.innerHTML = 'Connection Error';
sse.onmessage = ({ data }) => document.body.innerHTML = data;
</script>
</head>
<body></body>
</html>
`,
});
}
}
});
Thank you very much for your support!
[Update 2021-11-04]:
I have made some progress doing research across different sources (https://deno.land/std@0.76.0/http/server.ts, https://github.com/denoland/deno/issues/4817) and got a step closer to the solution. With the updated example below, at least the setup and usage of the Server Sent Events now work. The remaining issue (besides cleaning up and refactoring the code) is safely detecting when the incoming request has been closed (see the comments in the source code below):
#!/usr/bin/env deno run --allow-net
import { listenAndServe as http } from 'https://deno.land/std/http/server.ts';
http({ port: 8080 }, (req) => {
switch (req.url) {
case '/server-sent-events': {
// set up a quick'n'dirty write method without error checking
req.write = (data) => {
req.w.write(new TextEncoder().encode(data));
req.w.flush();
};
// setup the server sent event headers
let headers = '';
headers += 'HTTP/1.1 200 OK\r\n';
headers += 'Connection: keep-alive\r\n';
headers += 'Cache-Control: no-cache\r\n';
headers += 'Content-Type: text/event-stream\r\n';
headers += '\r\n';
req.write(headers);
// create the interval and send the date periodically
const sendDate = () => req.write(`data: ${new Date()}\n\n`);
sendDate();
const interval = setInterval(sendDate, 1000);
// final missing step:
// * clear the interval when the connection gets closed
// currently dropping the connection from the client will
// result in the error: Uncaught (in promise) BrokenPipe:
// Broken pipe (os error 32)
// this error also does not seem to be catchable in the
// req.write method above, so there needs to be another safe
// way to prevent this error from occurring.
} break;
default: {
req.respond({
headers: new Headers({
'Content-Type': 'text/html; charset=utf-8',
}),
body: `
<!DOCTYPE html>
<html>
<head>
<title>Server Send Events</title>
<meta charset="utf-8">
<script>
const sse = new EventSource('/server-sent-events');
sse.onerror = () => document.body.innerHTML = 'Connection Error';
sse.onmessage = ({ data }) => document.body.innerHTML = data;
</script>
</head>
<body></body>
</html>
`,
});
}
}
});
[Update 2021-04-16]
All issues have been resolved and are posted in my accepted answer below.
Deno's std http library doesn't support SSE out of the box, but you can use the Oak framework, or implement it yourself.
import { Application, Router } from "https://deno.land/x/oak/mod.ts";
const app = new Application();
const router = new Router();
router.get('/', ctx => {
ctx.response.body = `
<!DOCTYPE html>
<html>
<head>
<title>Server Send Events</title>
<meta charset="utf-8">
<script>
const sse = new EventSource('/server-sent-events');
sse.onerror = () => document.body.innerHTML = 'Connection Error';
sse.onmessage = ({ data }) => document.body.innerHTML = data;
</script>
</head>
<body></body>
</html>
`;
})
router.get("/server-sent-events", (ctx) => {
const target = ctx.sendEvents();
const sendDate = () => target.dispatchMessage(`${new Date()}`);
sendDate();
const interval = setInterval(sendDate, 1000);
});
app.use(router.routes());
await app.listen({ port: 8080 });
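One thing to watch: the interval in the /server-sent-events route above is never cleared. A hedged sketch of the cleanup, assuming oak's ServerSentEventTarget dispatches a close event (check the docs for the oak version you pin); the line belongs inside that route handler:
// stop the clock when the client disconnects
target.addEventListener("close", () => clearInterval(interval));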
In the end I found an answer to my question, so the full answer follows with plenty of comments, giving you a working version of Server Sent Events in Deno. The solution below also solves the os error 32, which is caused by not catching errors from the connection writer's flush method:
#!/usr/bin/env deno run --allow-net
// imports
import { ServerRequest, listenAndServe as http } from 'https://deno.land/std/http/server.ts';
// commodity
const encoder = new TextEncoder();
const print = console.log;
// start the web-server
// this one allows the endpoint `/server-sent-events`, which hosts a clock that
// will be refreshed every second (the efficiency of the clock solution could of
// course be optimised, as every client gets its own clock interval, but this
// this does not matter as this example wants to show how to setup and clean a
// task for every connecting client)
// all other requests will be answered with a simple html page that subscribes
// to the sse-based clock
http({ port: 8080 }, async (req) => {
// ip address of the client (formatted as `ip:port`, so we cut the `:port` part
// of it)
const ip = req.headers.get('host').split(':').slice(0, -1).join(':');
// determine the endpoint to access
switch (req.url) {
// host the server sent event based clock
case '/server-sent-events': {
// logging
print(`+ Client ${ip} connected`);
// prepare the disconnect promise. we will use this one later on to await
// the clients disconnect, so we can properly clean up. so the promise will
// be resolved manually by us when we detect a disconnect from the client
// on an attempt to send new data to him (unfortunately there seems to be
// no other way to detect when the client actually closed the connection)
let resolver;
const disconnect = new Promise((resolve) => resolver = resolve);
// write helper
req.write = async (data) => {
// send the current data to the client
req.w.write(encoder.encode(data));
// to actually send the data we need to flush the writer first. we need
// to try/catch this part, as not handling errors on flush will lead to
// the `Broken pipe (os error 32)` error
try {
await req.w.flush();
} catch(err) {
// throw any errors but the broken pipe, which gets thrown when the
// client has already disconnected and we try to send him new data
// later on
if (err.name !== 'BrokenPipe') {
throw err;
}
// close the connection from our side as well
req.conn.close();
// resolve our `disconnect` promise, so we can clean up
resolver();
}
};
// date writer (interval method which pushes the current date to the client)
const sendDate = async () => await req.write(`data: ${new Date()}\n\n`);
// prepare and send the headers
let headers = '';
headers += `HTTP/1.1 200 OK\r\n`;
headers += `Connection: keep-alive\r\n`;
headers += `Cache-Control: no-cache\r\n`;
headers += `Content-Type: text/event-stream\r\n`;
headers += `\r\n`;
await req.write(headers);
// send the date now for the first time and then every second
sendDate();
const interval = setInterval(sendDate, 1000);
// await until the clients disconnects to clean up. so we will be "stuck"
// here until a disconnect gets detected as we use a promise based approach
// to detect the disconnect
await disconnect;
clearInterval(interval);
// logging
print(`- Client ${ip} disconnected`);
} break;
// all other requests host a simple html page which subscribes to the clock
default: {
print(`* Serve website to ${ip}`);
req.respond({
headers: new Headers({
'Content-Type': 'text/html; charset=utf-8',
}),
body: `
<!DOCTYPE html>
<html>
<head>
<title>Server Sent Events</title>
<meta charset="utf-8">
<script>
const sse = new EventSource('/server-sent-events');
sse.onerror = () => document.body.innerHTML = 'Connection Error';
sse.onmessage = ({ data }) => document.body.innerHTML = data;
</script>
</head>
<body></body>
</html>
`,
});
}
}
});
This example seems more idiomatic.
import { serve } from "https://deno.land/std@0.116.0/http/server.ts";
const msg = new TextEncoder().encode("data: hello\r\n\r\n"); // an SSE message must end with a blank line
serve(async (_) => {
let timerId: number | undefined;
const body = new ReadableStream({
start(controller) {
timerId = setInterval(() => {
controller.enqueue(msg);
}, 1000);
},
cancel() {
if (typeof timerId === "number") {
clearInterval(timerId);
}
},
});
return new Response(body, {
headers: {
"Content-Type": "text/event-stream",
},
});
});
console.log("Listening on http://localhost:8000");
I have been reading about Deno for a few hours and finally got an http static server running.
I would like to know what else is needed to add https to it.
I understand the leaf-to-root arrangement of certificates, but not how to set it up in Deno.
Working code:
import {
gray,
green,
cyan,
bold,
yellow,
red,
} from 'https://deno.land/std@0.58.0/fmt/colors.ts';
import { Application, HttpError, send, Status } from 'https://deno.land/x/oak/mod.ts';
const app = new Application();
// Middleware 1 - Error handler
app.use(async (context, next) => {
try {
await next();
} catch (e) {
if (e instanceof HttpError) {
context.response.status = e.status as any;
// expose
// Determines if details about the error should be automatically exposed in a response.
// This is automatically set to true for 4XX errors, as they represent errors in the request...
if (e.expose) {
context.response.body = `
<!DOCTYPE html>
<html>
<body>
<h1>${e.status} - ${Status[e.status]}</h1>
<!-- <h1>${e.status} - ${e.message}</h1> -->
</body>
</html>`;
} else {
context.response.body = `
<!DOCTYPE html>
<html>
<body>
<h1>${e.status} - ${Status[e.status]}</h1>
<!-- <h1>${e.status} - ${e.message}</h1> -->
</body>
</html>`;
}
} else if (e instanceof Error) {
context.response.status = 500;
// ...while 5XX errors are set to false as they are internal server errors and
// exposing details could leak important server security information.
context.response.body = `
<!DOCTYPE html>
<html>
<body>
<h1>500 - Internal Server Error</h1>
</body>
</html>`;
console.log('Unhandled Error:', red(bold(e.message)));
console.log(e.stack);
}
}
});
// Middleware 2 - Logger
app.use(async (context, next) => {
await next();
const rt = context.response.headers.get('X-Response-Time');
console.log(`${green(context.request.method)} ${cyan(context.request.url.pathname)} - ${bold(String(rt),)}`, );
});
// Middleware 3 - Response Time
app.use(async (context, next) => {
const start = Date.now();
await next();
const ms = Date.now() - start;
context.response.headers.set('X-Response-Time', `${ms}ms`);
});
// End point - Send static content
app.use(async (context) => {
await context.send({
root: `${Deno.cwd()}/var/www/example1.com/public`,
index: 'index.html',
});
});
// Welcome message
app.addEventListener('listen', ({ hostname, port }) => {
console.clear();
console.log(' ');
console.log(bold('Deno 1.1.1 - Static HTTP Server'));
console.log(gray(' #host: ') + yellow(`${hostname}`));
console.log(gray(' #port: ') + yellow(`${port}`));
console.log(gray(' Status: ') + green('online'));
console.log(gray(' #root: ') + cyan('/var/www/example1.com/public'));
console.log(gray(' #index: ') + cyan('index.html'));
console.log(' ');
});
await app.listen({ hostname: '127.0.0.1', port: 80 });
// run the server
// deno run --allow-net --allow-read httpServer.ts
Read the documentation on serveTLS and listenAndServeTLS.
If you have a valid certificate, you shouldn't have any issues. However, I had some difficulty overriding rejections of self-signed SSL certificates. Also, I ended up serving ssl on a port other than 443, due to permission errors I received while trying to run an HTTPS server as a local user.
Those are the issues I ran into and how I got around them. I hope it helps.
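For reference, a minimal sketch using the std library's listenAndServeTLS; the certificate/key paths and the port are placeholders, and if you want to keep the Oak middleware stack from the question, Oak's app.listen also accepts TLS options (check the docs for the version you pin):
#!/usr/bin/env deno run --allow-net --allow-read
import { listenAndServeTLS } from 'https://deno.land/std@0.58.0/http/server.ts';
listenAndServeTLS(
  {
    hostname: '127.0.0.1',
    port: 8443, // a non-privileged port; binding 443 may need elevated permissions
    certFile: './certs/example1.com.crt', // leaf certificate (plus intermediates)
    keyFile: './certs/example1.com.key',
  },
  (req) => {
    req.respond({ body: 'Hello over HTTPS\n' });
  },
);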
I'm testing with a fake JSON file in a single-page app, but I always get "Unexpected token < in JSON at position 0", or an empty object if I use JSON.stringify() on my promise return. I'm not sure why this is happening, and I have been struggling with it for two days. Am I missing something?
My file structure is like:
-server
-client
-action
-movies.js
-route
-fakeData.json
-movieList.js
-index.js
When I use Postman, I can see the returned object, and when I inspect the browser's network tab, I can see the request and it returns status 200. I'm doing this with redux-thunk, plus the http-proxy-middleware library so the client and server can talk.
fakeData.json:
{
"movies": [
{
"name": "test1",
"url": "https://example1.com"
},
{
"name": "test2",
"url": "https://example2.com"
}
]
}
movieList.js:
const characters = require('../FakeData/characters.json');
// reads the file synchronously
var fs = require("fs");
module.exports = (app) => {
app.get('/api/characters', (req, res) => {
const fileContents = fs.readFileSync(__dirname+'/characters.json', 'utf8')
try {
const data = JSON.parse(fileContents)
res.json({data})
} catch(err) {
console.error(err)
}
// res.json(characters)
});
}
index.js:
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
require('./routes/movieList')(app);
if (process.env.NODE_ENV === 'production') {
// Express will serve up production assets like main.css or main.js
app.use(express.static('client/build'));
const path = require('path');
// Express serves up index.html if it doesn't recognize the route
app.get('/', (req, res) => { // Catch the rest
res.sendFile(path.resolve(__dirname, 'client', 'build', 'index.html'));
});
}
const PORT = process.env.PORT || 5000;
app.listen(PORT)
movies.js:
export const FETCH_CHAR = "fetch_character";
export const SET_CHARS = "set_characters";
function handleResponse(response) {
if(response.ok) {
return response.json()
} else {
let error = new Error(response.statusText);
error.response = response;
throw error;
}
}
export function setMovies(characters) {
return {
type: SET_CHARS,
characters
}
}
export function fetchChars() {
return dispatch => {
fetch('/api/characters')
.then(res => {
// return res.text() // when I try res.text(), I get an HTML response back
return res.json() // this gives me "Unexpected token < in JSON at position 0"
})
.then(data => dispatch(setMovies(data.characters)))
}
}
This is how I set up the proxy in my React app, in case you are curious:
const proxy = require('http-proxy-middleware');
module.exports = function(app) {
app.use(proxy('/api/*', { target: 'http://localhost:5000' , changeOrigin: true }));
};
What I'm thinking is that after I get the data from the fake JSON file, I can do more with it, like showing the movies data, because I call this API right after the component hooks into the DOM.
Edit
In my movieList.js, I tried to use:
try {
const data = JSON.parse(fileContents)
res.json({ foo: data })
} catch(err) {
console.error(err)
}
and in movies.js I tried returning res instead of res.json(), because otherwise I get "Unexpected token < in JSON at position 0". In the network tab of the dev tools, the response is the following:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="shortcut icon" href="/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<meta name="theme-color" content="#000000" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="/manifest.json" />
<!--
Notice the use of in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>React App</title>
</head>
<body>
<style>
html {
height: 100%;
min-height: 100%;
position: relative;
/* height: 100vh; */
margin: 0;
/* overflow:auto; */
}
body {
height: 100%;
position: relative;
}
#root {
height: 100%;
/* height: 100vh; */
}
</style>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
<script src="/static/js/bundle.js"></script><script src="/static/js/1.chunk.js"></script><script src="/static/js/main.chunk.js"></script><script src="/main.8c1de248b2f13e929d84.hot-update.js"></script></body>
</html>
You don't need the curly brackets in your response: res.json({ data }) nests the payload under a data key, so data.characters on the client is undefined. On the client you can check what actually comes back:
.then(response => response.json())
.then(data => console.log(data))
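For context, the shape difference on the server side, assuming data holds the parsed file contents:
// sketch of the two response shapes
res.json({ data }); // the client receives { "data": { "movies": [...] } }
res.json(data);     // the client receives { "movies": [...] }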
If you are trying to serve mock data from the server, why not create a variable that holds the JSON and export it?
The problem here, based on my experience, is that you are sending a string while the client expects JSON.
Try something like:
fakeData.js:
const fake = {
"movies": [
{
"name": "test1",
"url": "https://example1.com"
},
{
"name": "test2",
"url": "https://example2.com"
}
]
}
module.exports = fake
And in movieList.js:
const characters = require('./whatever-path/fakeData.js');
module.exports = (app) => {
app.get('/api/characters', (req, res) => {
res.json({characters})
});
}
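A quick sketch of what the client then receives from this route:
// client side (sketch): the payload is { characters: { movies: [...] } }
fetch('/api/characters')
  .then(res => res.json())
  .then(data => console.log(data.characters.movies)); // [{ name: 'test1', ... }, ...]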
Hope it works!