Error while deploying a smart contract to Mumbai testnet through Hardhat - node.js

I've been having this problem while trying to deploy a smart contract to the Mumbai testnet using Hardhat, and I keep getting the following error:
Error HH9: Error while loading Hardhat's configuration.
You probably tried to import the "hardhat" module from your config or a file imported from it.
This is not possible, as Hardhat can't be initialized while its config is being defined.
To learn more about how to access the Hardhat Runtime Environment from different contexts go to https://hardhat.org/hre
Here's my smart contract code:
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.1;
// Implements the ERC721 standard. NOTE: the npm scope is "@openzeppelin";
// a "#openzeppelin" path will not resolve at compile time.
import "@openzeppelin/contracts/token/ERC721/ERC721.sol";
// Keeps track of the number of tokens issued.
import "@openzeppelin/contracts/utils/Counters.sol";
import "@openzeppelin/contracts/access/Ownable.sol";

// Simple ERC721 collection with sequential token IDs and an owner-settable
// base URI. Inheriting Ownable provides the onlyOwner modifier so only the
// deployer can change privileged settings.
contract myContract is ERC721, Ownable {
    using Counters for Counters.Counter;

    // Monotonically increasing counter; its current value is the last minted ID.
    Counters.Counter private currentTokenId;

    /// @dev Base token URI used as a prefix by tokenURI().
    string public baseTokenURI;

    constructor() ERC721("MyToken", "MTK") {
        baseTokenURI = "";
    }

    /// @dev Mints one token to `recipient` and returns the new token ID.
    function mintTo(address recipient) public returns (uint256) {
        currentTokenId.increment();
        uint256 newItemId = currentTokenId.current();
        _safeMint(recipient, newItemId);
        return newItemId;
    }

    /// @dev Returns the URI prefix used for all token IDs.
    function _baseURI() internal view virtual override returns (string memory) {
        return baseTokenURI;
    }

    /// @dev Sets the base token URI prefix. Restricted to the contract owner;
    /// the original version was callable by anyone, contradicting the stated
    /// intent that only the contract creator can interact with it.
    function setBaseTokenURI(string memory _baseTokenURI) public onlyOwner {
        baseTokenURI = _baseTokenURI;
    }
}
Here's the deploy script:
// Hardhat-wrapped ethers instance (injected by @nomiclabs/hardhat-ethers).
const { ethers } = require("hardhat");

/**
 * Deploys the "myContract" ERC721 contract and logs its address.
 * Run with: npx hardhat run scripts/deploy.js --network mumbai
 */
async function main() {
  // Fetch the compiled contract factory via ethers.js.
  const factory = await ethers.getContractFactory("myContract");
  // deploy() sends the deployment transaction and returns the contract object.
  const contract = await factory.deploy();
  // Wait until the deployment transaction is mined before reporting the
  // address — otherwise the contract may not exist on-chain yet.
  await contract.deployed();
  console.log(`Contract deployed to address: ${contract.address}`);
}

main()
  .then(() => process.exit(0))
  .catch((error) => {
    console.error(error);
    process.exit(1);
  });
And here's my hardhat.config file:
/**
* #type import('hardhat/config').HardhatUserConfig
*/
require('dotenv').config();
require("#nomiclabs/hardhat-ethers");
require("#nomiclabs/hardhat-waffle");
require("./scripts/deploy.js");
require("#nomiclabs/hardhat-etherscan");
const { MORALIS_POLYGON_KEY, POLYGONSCAN_API_KEY, ACCOUNT_PRIVATE_KEY } = process.env;
module.exports = {
solidity: "0.8.1",
defaultNetwork: "mumbai",
networks: {
hardhat: {},
mumbai: {
url: `${MORALIS_POLYGON_KEY}`,
accounts: [`0x${ACCOUNT_PRIVATE_KEY}`],
},
},
etherscan: {
apiKey: POLYGONSCAN_API_KEY,
},
};
And here's my package.json file:
{
"name": "backend",
"version": "1.0.0",
"description": "backend for the NFT Marketplace dApp",
"main": "src/server.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"start": "nodemon src/server.js",
"build": "node src/server.js"
},
"author": "Ayed Oukhay",
"license": "ISC",
"dependencies": {
"@openzeppelin/contracts": "^4.0.0",
"body-parser": "^1.20.0",
"cors": "^2.8.5",
"dotenv": "^16.0.0",
"express": "^4.18.1",
"helmet": "^5.0.2",
"mongodb": "^4.5.0",
"mongoose": "^6.3.2",
"nodemon": "^2.0.16",
"web3": "^1.7.3"
},
"devDependencies": {
"@nomiclabs/hardhat-ethers": "^2.0.6",
"@nomiclabs/hardhat-etherscan": "^3.0.3",
"@nomiclabs/hardhat-waffle": "^2.0.3",
"chai": "^4.3.6",
"ethereum-waffle": "^3.4.4",
"ethers": "^5.6.6",
"hardhat": "^2.9.5"
}
}
When I tried fixing it by replacing the following line:
const { ethers } = require("hardhat");
with: const { ethers } = require("hardhat/config");
I get the following error:
TypeError: Cannot read property 'getContractFactory' of undefined
And even when I replaced the deploy.js code with one that's based on tasks and helpers, it compiles successfully but the npx hardhat run scripts/deploy.js --network mumbai it doesn't return anything.
here's the code that I replaced it with:
deploy.js
// Task-based deploy: importing from "hardhat/config" (not "hardhat") makes
// this file safe to require() from hardhat.config.js; invoke with
// `npx hardhat deploy --network mumbai`.
const { task } = require("hardhat/config");
const { getAccount } = require("./helpers.js");
// Registers a "deploy" task; `hre` is injected by Hardhat at run time.
task("deploy", "Deploys the smart contract ...").setAction(async function (taskArguments, hre) {
// Factory bound to the wallet signer constructed in helpers.js.
const myContractFactory = await hre.ethers.getContractFactory("myContract", getAccount());
console.log('Deploying myContract...');
const contract = await myContractFactory.deploy();
// Wait for the deployment transaction to be mined.
await contract.deployed();
console.log(`Contract deployed to address: ${contract.address}`);
});
and helpers.js
const { ethers } = require("ethers");
const { getContractAt } = require("#nomiclabs/hardhat-ethers/internal/helpers");
// Helper method for fetching environment variables from .env
function getEnvVariable(key, defaultValue) {
if (process.env[key]) {
return process.env[key];
}
if (!defaultValue) {
throw `${key} is not defined and no default value was provided`;
}
return defaultValue;
}
// Helper method for fetching a connection provider to the Ethereum network
function getProvider() {
return ethers.getDefaultProvider(getEnvVariable("NETWORK", "mumbai"), {
moralis: getEnvVariable("MORALIS_POLYGON_KEY"),
});
}
// RQ:: The getProvider() helper also lets us use other EVM networks (like Ethereum mainnet or Polygon) by optionally setting a NETWORK environment variable in .env.
// Helper method for fetching a wallet account using an environment variable for the PK
function getAccount() {
return new ethers.Wallet(getEnvVariable("ACCOUNT_PRIVATE_KEY"), getProvider());
}
// Helper method for fetching a contract instance at a given address
function getContract(contractName, hre) {
const account = getAccount();
return getContractAt(hre, contractName, getEnvVariable("NFT_CONTRACT_ADDRESS"), account);
}
module.exports = {
getEnvVariable,
getProvider,
getAccount,
getContract,
}
Any help would be really appreciated, I've been stuck on this for almost a week now.

Ironically, I found the solution just as I posted this. well, here it is for anyone who's facing the same problem: in the hardhat.config file, remove the '${}' from both network url and accounts. that solved it for me. Can't believe it took me so long to figure out lol
so your config file should look like this:
/**
* #type import('hardhat/config').HardhatUserConfig
*/
require('dotenv').config();
require("#nomiclabs/hardhat-ethers");
require("#nomiclabs/hardhat-waffle");
// require("./scripts/deploy.js");
require("#nomiclabs/hardhat-etherscan");
const { MORALIS_POLYGON_KEY, POLYGONSCAN_API_KEY, ACCOUNT_PRIVATE_KEY } = process.env;
module.exports = {
solidity: "0.8.1",
defaultNetwork: "mumbai",
networks: {
hardhat: {},
mumbai: {
url: MORALIS_POLYGON_KEY,
accounts: [ACCOUNT_PRIVATE_KEY],
},
},
etherscan: {
apiKey: POLYGONSCAN_API_KEY,
},
};

Related

Can't mock a ES6 imported function in NodeJS

I've been trying for some time to mock the fetchLiveMatches imported function with no success. I've been browsing for some ideas but I think I ran out of it, so I could use some help. Any ideas of what I am doing wrong?
live.test.js
import * as liveController from "./live";
// NOTE: the package scope is "@jest/globals"; "#jest/globals" will not resolve.
import { jest } from "@jest/globals";
import * as liveService from "../service/live";
import { buildReq, buildRes, buildNext } from "../utils/testingHelper";

// NOTE(review): with native ES modules (no Babel transform), jest.mock() is
// not hoisted and does not auto-mock the module, so fetchLiveMatches stays a
// real function here — which is exactly the failure being asked about.
jest.mock("../service/live");

beforeEach(() => {
  jest.clearAllMocks();
});

describe("Live Controller", () => {
  test("calls fetchLiveMatches function to fetch from external API", async () => {
    const req = buildReq();
    const res = buildRes();
    const next = buildNext();

    await liveController.getLiveMatches(req, res, next);

    expect(next).not.toHaveBeenCalled();
    expect(liveService.fetchLiveMatches).toHaveBeenCalled();
    expect(res.status).toHaveBeenCalledWith(500);
    expect(res.status).toHaveBeenCalledTimes(1);
  });
});
service/live.js
import axios from "axios";
// Fetches the currently-live matches from the external football API and
// resolves with the response body only.
// NOTE(review): API_FOOTBALL_BASE_URL, GET_EVENTS, MATCH_LIVE, WIDGET_KEY,
// TIMEZONE and DETAILS are presumably defined in the elided "hidden code" —
// confirm they are in scope here.
async function fetchLiveMatches() {
// Some hidden code
return axios({
method: "get",
url: `${API_FOOTBALL_BASE_URL}${GET_EVENTS}${MATCH_LIVE}${WIDGET_KEY}${TIMEZONE}${DETAILS}`,
headers: {}
}).then(res => res.data);
}
export { fetchLiveMatches };
jest.config.js
// Jest config for native ESM: node test environment, and an empty
// `transform` map so no Babel transpilation is applied.
export default {
testEnvironment: "jest-environment-node",
transform: {}
};
package.json
{
"name": "server",
"version": "1.0.0",
"main": "index.js",
"type": "module",
"license": "MIT",
"dependencies": {
"express": "^4.18.2"
},
"devDependencies": {
"axios": "^1.1.3",
"eslint": "^8.26.0",
"jest": "^29.2.2",
"prettier": "^2.7.1"
},
"scripts": {
"start": "node --watch index.js",
"start:no-watch": "node index.js",
"test": "node --experimental-vm-modules node_modules/jest/bin/jest.js --watch"
}
}
Test Error Output
Live Controller › calls fetchLiveMatches function to fetch from external API
expect(received).toHaveBeenCalled()
Matcher error: received value must be a mock or spy function
Received has type: function
Received has value: [Function fetchLiveMatches]
Just posting the solution I found for anyone who eventually is facing the same problem:
First, since I'm using ES6/module imports without Babel I changed the mock function to unstable_mockModule, and then based on the docs I decided to try dynamic imports in test scope after mocking the modules.
If you're using ES module imports then you'll normally be inclined to put your import statements at the top of the test file. But often you need to instruct Jest to use a mock before modules use it. For this reason, Jest will automatically hoist jest.mock calls to the top of the module (before any imports). To learn more about this and see it in action, see this repo.
The test component works with the following code:
// Working ESM version: register the mock with unstable_mockModule first,
// then dynamically import both the module under test and the mocked service.
// NOTE: the package scope is "@jest/globals"; "#jest/globals" will not resolve.
import { jest } from "@jest/globals";
import { buildReq, buildRes, buildNext } from "../utils/testingHelper";

describe("Live Controller", () => {
  test("calls fetchLiveMatches function to fetch from external API", async () => {
    // Register the module mock BEFORE anything imports the real module.
    jest.unstable_mockModule("../service/live", () => ({
      fetchLiveMatches: jest.fn(() => [])
    }));

    // Dynamic imports happen after mock registration, so both modules
    // receive the mocked implementation.
    const { getLiveMatches } = await import("./live");
    const { fetchLiveMatches } = await import("../service/live");

    const req = buildReq();
    const res = buildRes();
    const next = buildNext(msg => console.log(msg));

    await getLiveMatches(req, res, next);

    expect(fetchLiveMatches).toHaveBeenCalled();
    expect(res.status).toHaveBeenCalledWith(200);
    expect(res.status).toHaveBeenCalledTimes(1);
  });
});

Error: Cannot use namespace 'Observable' as a type. (apollo-link, typescript)

i am trying to create a graphql subcription with hasura, follow this example: Boilerplate to test GraphQL subscriptions using nodejs, just added types for trypescript, if i run it directly with nodemon works fine, and gets me the data i want but if try to make a tsc i get the following errors:
and my code for the subscription is:
/***** Setup a GraphQL subscription observable ******************************/
import { DocumentNode, execute, GraphQLRequest } from 'apollo-link';
import { WebSocketLink } from 'apollo-link-ws';
import { SubscriptionClient } from 'subscriptions-transport-ws';
import ws from 'ws';
const getWsClient = (wsURL: string) => {
const client = new SubscriptionClient(
wsURL, {
reconnect: true, connectionParams: () => {
return {
headers: {
'x-hasura-admin-secret': process.env.HASURA_GRAPHQL_ADMIN_SECRET
}
}
}
}, ws
);
return client;
};
// wsURL: GraphQL endpoint
// query: GraphQL query (use gql`` from the 'graphql-tag' library)
// variables: Query variables object
const createSubscription = (wsURL: string, query: DocumentNode, variables: Record<string, any>) => {
const link = new WebSocketLink(getWsClient(wsURL));
return execute(link, { query, variables } as GraphQLRequest);
};
export default createSubscription
I already updated typescript for
"nodemon": "^2.0.15",
"ts-node": "^10.2.1",
"typescript": "^4.4.2"
does any one have any ideas?
Thanks in advance.

Why am I encountering an error when deploying a nodejs function in gcloud with a zip or directly with editor?

I want to realize cloud functions, I do it on vscode. I think I use all that is necessary to realize the cloud function.
To test this one I installed @google-cloud/storage and it works perfectly on my machine; however, when I compress it into a zip to import it into GCP, the deployment gives me an error:
(Build failed: function.js does not exist; Error ID: 7485c5b6)
While I clearly indicate the entry point of my exports.backup function that I indicate in the entry point in GCP.
Here is the code I'm trying to run - something must be missing but I can't figure it out.
package.json:
{
"name": "export-mysql",
"version": "1.0.0",
"description": "create backup database production",
"main": "index.js",
"scripts": {
"backup": "functions-framework --target=backup"
},
"author": "",
"license": "ISC",
"dependencies": {
"chalk": "^4.1.2",
"dayjs": "^1.10.7",
"dotenv": "^10.0.0",
"googleapis": "^92.0.0",
"@google-cloud/functions-framework": "^2.1.0"
}
}
code:
const { google } = require("googleapis");
const sqlAdmin = google.sqladmin("v1beta4");
const dayjs = require("dayjs");
const chalk = require("chalk");
const dotenv = require("dotenv");
const log = console.log;
const error = console.error;
dotenv.config({ path: "./config/.env" });
let = respo = "";
authorize(function (authClient) {
const date = dayjs(Date.now()).format("YYYYMMDDHHmm");
var request = {
project: "project",
instance: "database-prod",
resource: {
exportContext: {
databases: ["database"],
fileType: "SQL",
kind: "sql#exportContext",
uri: `gs://backup-database-pop/backup-prod-${date}.gz`,
},
},
auth: authClient,
};
sqlAdmin.instances.export(request, function (err, response) {
if (err) {
error(chalk.yellow.bold(`Status: ${err.code}`));
log(chalk.red.bold(`Message: ${err.message}`));
return;
}
// TODO: Change code below to process the `response` object:
// log(chalk.yellow.bold(`Status: ${response.status}`));
log(chalk.greenBright.bold(`Database Exporter dans le bucket -> backup-database-pop fichier: backup-prod-${date}.sql`));
respo = `Database Exporter dans le bucket -> backup-database-pop fichier: backup-prod-${date}.sql`;
return respo;
// log.log(JSON.stringify(response, null, 2));
});
});
function authorize(callback) {
google.auth
.getClient({
scopes: ["https://www.googleapis.com/auth/cloud-platform"],
})
.then((client) => {
callback(client);
})
.catch((err) => {
error(chalk.red.bold("authentication failed: ", err));
});
}
exports.backup = (req, res) => {
res.end();
log(respo);
log("Function complete!");
};
And here is the structure of the folder that is zipped:
functionFolder
folder -> config/.env
index.js
package.json
package-lock.json
authorize.json
Here is the solution you have to select the files and compress them and not compress the folder

Create React App doesn't properly mock modules from __mocks__ directory

I have a working example with Jest and mocks from __mocks__ directory that works :
With simple Jest setup
// package.json
{
"name": "a",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"test": "jest"
},
...
"devDependencies": {
"jest": "^26.6.3"
},
"dependencies": {
"@octokit/rest": "^18.0.12"
}
}
And then /index.js :
// NOTE: the package scope is "@octokit"; "#octokit" will not resolve.
const { Octokit } = require("@octokit/rest");

const octokit = new Octokit();

// Returns the octokit promise for the "octokit" org's public repositories,
// unchanged (so it resolves to whatever listForOrg resolves to).
module.exports.foo = function () {
  return octokit.repos.listForOrg({ org: "octokit", type: "public" });
};
with its test (/index.test.js):
// Test for /index.js: relies on the manual mock in /__mocks__ replacing the
// Octokit client so foo() resolves to [1, 2].
const { foo } = require("./index.js");
test("foo should be true", async () => {
expect(await foo()).toEqual([1,2]);
});
and the mock (/__mocks__/@octokit/rest/index.js):
// Manual Jest mock: replaces the Octokit constructor with one returning a
// stub whose repos.listForOrg resolves to [1, 2].
module.exports.Octokit = jest.fn().mockImplementation( () => ({
repos: {
listForOrg: jest.fn().mockResolvedValue([1,2])
}
}) );
This works quite well and tests pass.
With Create React App
However doing the same with Create React App seems to be giving me a weird result:
// package.json
{
"name": "b",
"version": "0.1.0",
"dependencies": {
"@octokit/rest": "^18.0.12",
"@testing-library/jest-dom": "^5.11.4",
"@testing-library/react": "^11.1.0",
"@testing-library/user-event": "^12.1.10",
"react": "^17.0.1",
"react-dom": "^17.0.1",
"react-scripts": "4.0.1",
"web-vitals": "^0.2.4"
},
"scripts": {
"start": "react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
},
...
}
And then /src/foo.js:
// NOTE: the package scope is "@octokit"; "#octokit" will not resolve.
import { Octokit } from "@octokit/rest";

const octokit = new Octokit();

// NOTE(review): mixing an ES import with module.exports only works because
// CRA/Babel transpiles this file — kept as-is to preserve behavior.
module.exports.foo = function () {
  return octokit.repos.listForOrg({ org: "octokit", type: "public" });
};
with its test (/src/foo.test.js):
const { foo} = require("./foo.js");
test("foo should be true", async () => {
expect(await foo()).toEqual([1,2]);
});
and the very same mock (under /src/__mocks__/@octokit/rest/index.js):
// Manual mock (ESM syntax, inside CRA's src/__mocks__ tree).
// NOTE(review): CRA's default Jest config enables resetMocks, which strips
// this mockResolvedValue before each test — the subject of this Q&A.
export const Octokit = jest.fn().mockImplementation( () => ({
repos: {
listForOrg: jest.fn().mockResolvedValue([1,2])
}
}) );
This makes the test fail:
FAIL src/foo.test.js
✕ foo should be true (2 ms)
● foo should be true
expect(received).toEqual(expected) // deep equality
Expected: [1, 2]
Received: undefined
2 |
3 | test("foo should be true", async () => {
> 4 | expect(await foo()).toEqual([1,2]);
| ^
5 | });
6 |
7 |
at Object.<anonymous> (src/foo.test.js:4:25)
After reading a lot it seems that I can't make __mocks__ work inside Create React App. What's the problem?
The problem is that CRA's default Jest setup automatically resets the mocks, which removes the mockResolvedValue you set.
One way to solve this, which also gives you more control to have different values in different tests (e.g. to test error handling) and assert on what it was called with, is to expose the mock function from the module too:
// Expose the inner mock so tests can (re)configure it after Jest's
// automatic resetMocks wipes its implementation.
export const mockListForOrg = jest.fn();
export const Octokit = jest.fn().mockImplementation(() => ({
repos: {
listForOrg: mockListForOrg,
},
}));
Then you configure the value you want in the test, after Jest would have reset it:
// NOTE: the package scope is "@octokit"; "#octokit" will not resolve. This
// import receives the manual mock, so mockListForOrg is the same jest.fn
// the production code ends up calling.
import { mockListForOrg } from "@octokit/rest";
import { foo } from "./foo";

test("foo should be true", async () => {
  // Configure the resolved value AFTER Jest's automatic reset has run.
  mockListForOrg.mockResolvedValueOnce([1, 2]);
  expect(await foo()).toEqual([1, 2]);
});
Another option is to add the following into your package.json to override that configuration, per this issue:
{
...
"jest": {
"resetMocks": false
}
}
This could lead to issues with mock state (calls received) being retained between tests, though, so you'll need to make sure they're getting cleared and/or reset somewhere.
Note that you generally shouldn't mock what you don't own, though - if the interface to @octokit/rest changes, your tests will continue to pass but your code won't work. To avoid this issue, I would recommend either or both of:
Moving the assertions to the transport layer, using e.g. MSW to check that the right request gets made; or
Writing a simple facade that wraps @octokit/rest, decoupling your code from the interface you don't own, and mocking that;
along with higher-level (end-to-end) tests to make sure everything works correctly with the real GitHub API.
In fact, deleting the mocks and writing such a test using MSW:
import { rest } from "msw";
import { setupServer } from "msw/node";
import { foo } from "./foo";
// Intercept the real HTTP request octokit makes and return a canned payload,
// so the test exercises the transport layer instead of a hand-written mock.
const server = setupServer(rest.get("https://api.github.com/orgs/octokit/repos", (req, res, ctx) => {
return res(ctx.status(200), ctx.json([1, 2]));
}));
beforeAll(() => server.listen());
afterAll(() => server.close());
test("foo should be true", async () => {
expect(await foo()).toEqual([1, 2]);
});
exposes that the current assumption about what octokit.repos.listForOrg would return is inaccurate, because this test fails:
● foo should be true
expect(received).toEqual(expected) // deep equality
Expected: [1, 2]
Received: {"data": [1, 2], "headers": {"content-type": "application/json", "x-powered-by": "msw"}, "status": 200, "url": "https://api.github.com/orgs/octokit/repos?type=public"}
13 |
14 | test("foo should be true", async () => {
> 15 | expect(await foo()).toEqual([1, 2]);
| ^
16 | });
17 |
at Object.<anonymous> (src/foo.test.js:15:25)
Your implementation should actually look something more like:
// async/await form: unwraps octokit's response envelope and returns only .data.
export async function foo() {
const { data } = await octokit.repos.listForOrg({ org: "octokit", type: "public" });
return data;
}
or:
// Promise-chain form: identical behavior, unwrapping .data via destructuring.
export function foo() {
return octokit.repos.listForOrg({ org: "octokit", type: "public" }).then(({ data }) => data);
}

How do I use Truffle to deploy a contract on ThunderCore?

How do I use Truffle to deploy a contract on ThunderCore?
Is it the same as using truffle on ethereum? how do I setup the config? Is there any document or resource to reference?
Set your Truffle project to:
Use @truffle/hdwallet-provider and point it at https://mainnet-rpc.thundercore.com
Set byzantium as the target EVM version for the Solidity compiler
See evmVersion in truffle-config.js below
ThunderCore supports byzantium as of April 2020
Self-contained Project Template
package.json
{
"name": "field-support",
"version": "0.0.1",
"main": "truffle-config.js",
"license": "LicenseRef-COPYING",
"directories": {
"test": "test"
},
"dependencies": {
"json5": "^2.1.0",
"truffle": "^5.1.22",
"@truffle/hdwallet-provider": "^1.0.34"
},
"scripts": {
"test": "truffle test",
"migrate": "truffle migrate",
"deploy": "truffle migrate",
"compile": "truffle compile",
"build": "truffle build",
"lint": "solium -d contracts && eslint ."
},
"keywords": [
"smart contract"
],
"author": ""
}
truffle-config.js
const HDWalletProvider = require("#truffle/hdwallet-provider");
const fs = require("fs");
const MAINNET_PROVIDER = "https://mainnet-rpc.thundercore.com";
let privateKeys = null;
let mnemonic = null;
try {
privateKeys = fs
.readFileSync(".private-keys", { encoding: "ascii" })
.split("\n")
.filter(x => x.length > 0);
} catch (err) {
if (err.code !== "ENOENT") {
throw err;
}
}
if (!privateKeys) {
try {
mnemonic = fs.readFileSync(".mnemonic", { encoding: "ascii" }).trim();
} catch (err) {
if (err.code !== "ENOENT") {
throw err;
}
}
}
module.exports = {
networks: {
// For `truffle develop`
development: {
host: "127.0.0.1", // Localhost (default: none)
port: 9545, // Standard Ethereum port (default: none)
network_id: "*" // Any network (default: none)
},
"thunder-mainnet": {
provider: () => {
if (privateKeys === null && mnemonic === null) {
throw new Error("Please create a .private-keys or .mnemonic file");
}
return privateKeys
? new HDWalletProvider(
privateKeys,
MAINNET_PROVIDER,
0, // <- change address_index if you want to use non-first address
privateKeys.length
)
: new HDWalletProvider(
mnemonic,
MAINNET_PROVIDER,
0 // <- change address_index if you want to use non-first address
);
},
network_id: "108"
}
},
// Set default mocha options here, use special reporters etc.
mocha: {
// timeout: 100000
},
// Configure your compilers
compilers: {
solc: {
version: "0.5.9", // Fetch exact version from solc-bin (default: truffle's version)
settings: {
// see the solidity docs for advice about optimization and evmversion
optimizer: {
enabled: true,
runs: 200
},
evmVersion: "byzantium" // Current evm on ThunderCore fixed at "byzantium"
}
}
}
};
contracts/SimplStorage.sol
// Minimal storage contract used to verify deployment works on ThunderCore.
pragma solidity ^0.5;
contract SimpleStorage {
uint storedData;
/// Stores `x` in contract storage.
function set(uint x) public {
storedData = x;
}
/// Returns the last stored value (0 if set() was never called).
function get() public view returns (uint) {
return storedData;
}
}
migrations/2_deploy.js
// Truffle migration step 2: deploys the SimpleStorage contract.
// `artifacts` and `deployer` are injected by the Truffle runtime.
const SimpleStorage = artifacts.require('SimpleStorage');
module.exports = function(deployer) {
deployer.deploy(SimpleStorage);
};
Sample Session
Create a wallet in Metamask
Get Thunder Tokens (TT) through the ThunderCore Mainnet Faucet
Export the private key and save it to a .private-keys file
Run truffle migrate --network thunder-mainnet:
$ truffle migrate --network thunder-mainnet
Compiling your contracts...
===========================
> Everything is up to date, there is nothing to compile.
Starting migrations...
======================
> Network name: 'thunder-mainnet'
> Network id: 108
> Block gas limit: 0x5f5e100
1_initial_migration.js
======================
Deploying 'Migrations'
----------------------
> transaction hash: 0x4d943a338d683d7ba0dc4937417b4b795b4791849b4695aeddb8811bdb265183
> Blocks: 5 Seconds: 6
> contract address: 0xFAc8a9a57cb2C70059D603f393F4A4f830C43a34
(...)
> Saving migration to chain.
> Saving artifacts
-------------------------------------
> Total cost: 0.00554924 ETH
2_deploy.js
===========
Deploying 'SimpleStorage'
-------------------------
> transaction hash: 0x8a6063fa9dd74935e0db262a03ffee38181e383e3d73dd408cc76730e79ac135
> Blocks: 5 Seconds: 6
> contract address: 0x75F89Ba793DDFFCA4b604Ae8B28E8DfD8Dbbe14a
(...)
You can find the code here in the simple-storage branch of the field-support repo.

Resources