A complete guide to building, running & deploying a DApp with your own Text-to-Image script to mint AI-generated art NFTs on FVM Hyperspace Testnet!
(get it - it's a pancake stack #sorrynotsorry)
💡 TLDR Tip 💡
This script is already available for use through Bacalhau via the CLI and an HTTP endpoint, so feel free to skip this part.
Quick Intro to Stable Diffusion
Stable Diffusion is an open-source machine learning model that generates images from natural-language text prompts. Don't worry though - we don't need to go and train a machine learning model for this (though hey - if that's your thing - you totally could!)
The Python Script
🦄 A complete walkthrough of how to build and Dockerise this text-to-image script and run it on Bacalhau is available in the Bacalhau documentation examples. 🦄
import argparse
from stable_diffusion_tf.stable_diffusion import Text2Image
from PIL import Image
import os
parser = argparse.ArgumentParser(description="Stable Diffusion")
parser.add_argument("--h",dest="height", type=int,help="height of the image",default=512)
parser.add_argument("--w",dest="width", type=int,help="width of the image",default=512)
parser.add_argument("--p",dest="prompt", type=str,help="Description of the image you want to generate",default="cat")
parser.add_argument("--n",dest="numSteps", type=int,help="Number of Steps",default=50)
parser.add_argument("--u",dest="unconditionalGuidanceScale", type=float,help="Number of Steps",default=7.5)
parser.add_argument("--t",dest="temperature", type=int,help="Number of Steps",default=1)
parser.add_argument("--b",dest="batchSize", type=int,help="Number of Images",default=1)
parser.add_argument("--o",dest="output", type=str,help="Output Folder where to store the Image",default="./")
args=parser.parse_args()
height=args.height
width=args.width
prompt=args.prompt
numSteps=args.numSteps
unconditionalGuidanceScale=args.unconditionalGuidanceScale
temperature=args.temperature
batchSize=args.batchSize
output=args.output
generator = Text2Image(
    img_height=height,
    img_width=width,
    jit_compile=False,  # You can try True as well (different performance profile)
)
img = generator.generate(
    prompt,
    num_steps=numSteps,
    unconditional_guidance_scale=unconditionalGuidanceScale,
    temperature=temperature,
    batch_size=batchSize,
)
# Save each generated image in the batch to the output folder
for i in range(0, batchSize):
    pil_img = Image.fromarray(img[i])
    pil_img.save(f"{output}/image{i}.png")
Running this script needs some serious GPU power, which usually means renting compute from a centralised cloud provider. Not only is that centralised, it's also inefficient - the data may be an unknown distance from the machine doing the computation - and it can get costly fast. I failed to find any free-tier cloud computing service that offered GPU processing for this (did someone say crypto mining bans..?), and plans came in at over US$400 a month (no thank you).
Bacalhau!
Bacalhau is a peer-to-peer open computation network that provides a platform for public, transparent and optionally verifiable computation. Users can run Docker containers or WebAssembly images as tasks against any data, including data stored in IPFS (& soon Filecoin). It even has support for GPU jobs - and not at US$400+ a month!
Running the script on Bacalhau
bacalhau docker run --gpu 1 ghcr.io/bacalhau-project/examples/stable-diffusion-gpu:0.0.1 -- python main.py --o ./outputs --p "Rainbow Unicorn"
The Smart Contract
The NFT Smart Contract is based on OpenZeppelin's implementation of ERC721, but uses the ERC721URIStorage version, which includes the metadata standard extensions (so we can pass in our IPFS-addressed metadata - which we'll save on NFT.Storage - to the contract).
BacalhauFRC721.sol
// SPDX-License-Identifier: MIT
pragma solidity ^0.8.4;
import "@openzeppelin/contracts/token/ERC721/extensions/ERC721URIStorage.sol";
import "@openzeppelin/contracts/utils/Counters.sol";
import "@hardhat/console.sol";
contract BacalhauFRC721 is ERC721URIStorage {
/** @notice Counter keeps track of the token ID number for each unique NFT minted in the NFT collection */
using Counters for Counters.Counter;
Counters.Counter private _tokenIds;
/** @notice This struct stores information about each NFT minted */
struct bacalhauFRC721NFT {
address owner;
string tokenURI;
uint256 tokenId;
}
/** @notice Keeping an array for each of the NFTs minted on this contract allows me to get information on them all with a read-only front end call */
bacalhauFRC721NFT[] public nftCollection;
/** @notice The mapping allows me to find NFTs owned by a particular wallet address. I'm only handling the case where an NFT is minted to an owner in this contract - but you'd need to handle others (like transfers to other wallets) in a mainnet contract */
mapping(address => bacalhauFRC721NFT[]) public nftCollectionByOwner;
/** @notice This event will be triggered (emitted) each time a new NFT is minted - which I will watch for on my front end in order to load new information that comes in about the collection as it happens */
event NewBacalhauFRC721NFTMinted(
address indexed sender,
uint256 indexed tokenId,
string tokenURI
);
/** @notice Creates the NFT Collection Contract with a Name and Symbol */
constructor() ERC721("Bacalhau NFTs", "BAC") {
console.log("Hello Fil-ders! Now creating Bacalhau FRC721 NFT contract!");
}
/**
@notice The main function which will mint each NFT.
The ipfsURI is a link to the ipfs content identifier hash of the NFT metadata stored on NFT.Storage. This data minimally includes name, description and the image in a JSON.
*/
function mintBacalhauNFT(address owner, string memory ipfsURI)
public
returns (uint256)
{
// get the tokenID for this new NFT
uint256 newItemId = _tokenIds.current();
// Format info for saving to our array
bacalhauFRC721NFT memory newNFT = bacalhauFRC721NFT({
owner: msg.sender,
tokenURI: ipfsURI,
tokenId: newItemId
});
//mint the NFT to the chain
_mint(owner, newItemId);
//Set the NFT Metadata for this NFT
_setTokenURI(newItemId, ipfsURI);
_tokenIds.increment();
//Add it to our collection array & owner mapping
nftCollection.push(newNFT);
nftCollectionByOwner[owner].push(newNFT);
// Emit an event on-chain to say we've minted an NFT
emit NewBacalhauFRC721NFTMinted(
msg.sender,
newItemId,
ipfsURI
);
return newItemId;
}
/**
* @notice helper function to display NFTs for frontends
*/
function getNFTCollection() public view returns (bacalhauFRC721NFT[] memory) {
return nftCollection;
}
/**
* @notice helper function to fetch NFTs by owner
*/
function getNFTCollectionByOwner(address owner) public view returns (bacalhauFRC721NFT[] memory){
return nftCollectionByOwner[owner];
}
}
Requirements
I'll be deploying this contract to the Filecoin Hyperspace testnet, but you could deploy this contract to any EVM-compatible chain including Polygon, BSC, Optimism, Arbitrum, Avalanche and more. You could even tweak your front end to make a multi-chain NFT!
Deploying the Smart Contract with Hardhat
I'm using hardhat to deploy this contract to the Hyperspace testnet.
🛸 Hyperspace RPC & Block Explorer Options:
Public RPC Endpoints: https://api.hyperspace.node.glif.io/rpc/v1, https://hyperspace.filfox.info/rpc/v0, https://filecoin-hyperspace.chainstacklabs.com/rpc/v0
Block Explorers: https://fvm.starboard.ventures/, https://hyperspace.filscan.io/, https://beryx.zondax.ch (Open API)
hardhat.config.ts
import '@nomicfoundation/hardhat-toolbox';
import { config as dotenvConfig } from 'dotenv';
import { HardhatUserConfig } from 'hardhat/config';
import { resolve } from 'path';
//Import our customised tasks
// import './pages/api/hardhat/tasks';
const dotenvConfigPath: string = process.env.DOTENV_CONFIG_PATH || './.env';
dotenvConfig({ path: resolve(__dirname, dotenvConfigPath) });
// Ensure that we have all the environment variables we need.
const walletPrivateKey: string | undefined = process.env.WALLET_PRIVATE_KEY;
if (!walletPrivateKey) {
throw new Error('Please set your Wallet private key in a .env file');
}
const config: HardhatUserConfig = {
solidity: '0.8.17',
defaultNetwork: 'filecoinHyperspace',
networks: {
hardhat: {},
filecoinHyperspace: {
url: 'https://api.hyperspace.node.glif.io/rpc/v1',
chainId: 3141,
accounts: [process.env.WALLET_PRIVATE_KEY ?? 'undefined'],
},
// bleeding edge often-reset FVM testnet
filecoinWallaby: {
url: 'https://wallaby.node.glif.io/rpc/v0',
chainId: 31415,
accounts: [process.env.WALLET_PRIVATE_KEY ?? 'undefined'],
//explorers: https://wallaby.filscan.io/ and Starboard
},
},
// I am using the path mapping so I can keep my hardhat deployment within the /pages folder of my DApp and therefore access the contract ABI for use on my frontend
paths: {
root: './pages/api/hardhat',
tests: './pages/api/hardhat/tests', //who names a directory in the singular?!!! Grammarly would not be happy
cache: './pages/api/hardhat/cache',
},
};
export default config;
And to deploy the smart contract we create a deploy script - note that I'm specifically setting the Wallet address here as the signer (owner); there are a few mapping errors still being worked out in FEVM at the time of writing that can cause some odd behaviour.
deploy/deployBacalhauFRC721.ts
import hre from 'hardhat';
import type { BacalhauFRC721 } from '../typechain-types/contracts/BacalhauFRC721';
import type { BacalhauFRC721__factory } from '../typechain-types/factories/contracts/BacalhauFRC721__factory';
async function main() {
console.log('Bacalhau721 deploying....');
// !!!needed as hardhat's default does not map correctly to the FEVM
const owner = new hre.ethers.Wallet(
process.env.WALLET_PRIVATE_KEY || 'undefined',
hre.ethers.provider
);
const bacalhauFRC721Factory: BacalhauFRC721__factory = <
BacalhauFRC721__factory
> await hre.ethers.getContractFactory('BacalhauFRC721', owner);
const bacalhauFRC721: BacalhauFRC721 = <BacalhauFRC721>(
await bacalhauFRC721Factory.deploy()
);
await bacalhauFRC721.deployed();
console.log('bacalhauFRC721 deployed to ', bacalhauFRC721.address);
// optionally log to a file here
}
main().catch((error) => {
console.error(error);
process.exitCode = 1;
});
To deploy, run the above script in the terminal with the following commands (NB: since we've set the default network to filecoinHyperspace in our config, passing the --network flag isn't strictly necessary, though it's shown below):
> cd ./pages/api/hardhat/deploy/
npx hardhat run ./deployBacalhauFRC721.ts --network filecoinHyperspace
The API currently only hits the stable diffusion script documented in this blog; however, the team is in the process of extending it into a more generic API so that you can call any of the examples, as well as your own deployed scripts, from an HTTP REST API. Keep an eye out for updates, or ask in the #bacalhau channel.
>run/test in terminal
curl -XPOST -d '{"prompt": "rainbow unicorn"}' 'http://dashboard.bacalhau.org:1000/api/v1/stablediffusion'
>react / typescript code
import { CID } from 'multiformats/cid';
export const callBacalhauJob = async (promptInput: string) => {
//Bacalahau HTTP Stable Diffusion Endpoint
const url = 'http://dashboard.bacalhau.org:1000/api/v1/stablediffusion';
const headers = {
'Content-Type': 'application/x-www-form-urlencoded',
};
const data = {
prompt: promptInput, //The user text prompt!
};
/* FETCH FROM BACALHAU ENDPOINT */
const cid = await fetch(url, {
method: 'POST',
body: JSON.stringify(data),
headers: headers,
})
.then(async (res) => {
let body = await res.json();
if (body.cid) {
/* Bacalhau returns a V0 CID which we want to convert to a V1 CID for easier usage with http gateways (ie. displaying the image on web), so I'm using the IPFS multiformats package to convert it here */
return CID.parse(body.cid).toV1().toString();
}
})
.catch((err) => {
console.log('error in bac job', err);
});
return cid;
};
This function will return an IPFS CID (content identifier) pointing at the job's result folder. The generated image can then be found under /outputs/image0.png within that folder.
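For example, here's a minimal sketch of turning that CID into an HTTP URL the front end can display - this assumes the nftstorage.link public IPFS gateway and the /outputs/image0.png path noted above, and the helper name is just for illustration:
// Hypothetical helper: build an HTTP gateway URL for the generated image from the Bacalhau job CID
// (assumes the CID has already been converted to v1, as done in callBacalhauJob above)
export const buildImageHTTPURL = (cidV1: string) => {
  return `https://${cidV1}.ipfs.nftstorage.link/outputs/image0.png`;
};
// usage: const imageHTTPURL = buildImageHTTPURL(await callBacalhauJob('rainbow unicorn'));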
When creating NFTs, it's important to note that unless you are storing the metadata on-chain (which can become prohibitively expensive for large files), you need storage for it that is persistent, reliable and immutable in order to preserve the 'non-fungibility' of the token.
Using NFT.Storage means that we get an immutable IPFS file CID (a content - not location - identifier) for our metadata, which is not just pinned to IPFS but also stored on Filecoin for persistence. You'll just need to sign up for NFT.Storage and get an API key (to save in your .env file) for this one.
.env example
NEXT_PUBLIC_NFT_STORAGE_API_KEY=xxx
import { NFTStorage } from 'nft.storage';
//connect to NFT.Storage Client
const NFTStorageClient = new NFTStorage({
token: process.env.NEXT_PUBLIC_NFT_STORAGE_API_KEY,
});
const createNFTMetadata = async (
promptInput: string,
imageIPFSOrigin: string, //the ipfs path eg. ipfs://[CID]
imageHTTPURL: string //an ipfs address fetchable through http for the front end to use (ie. including an ipfs http gateway on it like https://[CID].ipfs.nftstorage.link)
) => {
console.log('Creating NFT Metadata...');
let nftJSON;
// let's get the image data Blob from the IPFS CID that was returned from Bacalhau earlier...
await getImageBlob(status, setStatus, imageHTTPURL).then(
async (imageData) => {
// Now let's create a unique CID for that image data - since we don't really want the rest of the data returned from the Bacalhau job..
await NFTStorageClient.storeBlob(imageData)
.then((imageIPFS) => {
console.log(imageIPFS);
//Here's the JSON construction - only name, description and image are required fields - but I also want to save some other properties like the ipfs link, and perhaps you have other properties that give your NFTs rarity to add as well
nftJSON = {
name: 'Bacalhau Hyperspace NFTs 2023',
description: promptInput,
image: imageIPFSOrigin,
properties: {
prompt: promptInput,
type: 'stable-diffusion-image',
origins: {
ipfs: `ipfs://${imageIPFS}`,
bacalhauipfs: imageIPFSOrigin,
},
innovation: 100,
content: {
'text/markdown': promptInput,
},
},
};
})
.catch((err) => console.log('error creating blob cid', err));
}
);
return nftJSON;
};
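// With the metadata JSON assembled, store it with NFT.Storage - the returned metadata.url (an ipfs:// URI) is what we pass to the mint function later: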
await NFTStorageClient.store(nftJson)
.then((metadata) => {
// DONE! - do something with this returned metadata!
console.log('NFT Data pinned to IPFS & stored on Filecoin!');
console.log('Metadata URI: ', metadata.url);
// once saved we can use it to mint the NFT
// mintNFT(metadata);
})
.catch((err) => {
console.log('error uploading to nft.storage');
});
💡 Quick Tip 💡 NFT.Storage also offers a range of other functions like storeCar & storeDirectory, as well as a status() function - which returns the IPFS pinning and Filecoin storage deals of a CID -> this could be a pretty cool addition for an FEVM DApp (or an NFT implementation on FEVM once FEVM hits mainnet release) for checking on an NFT's storage status.
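As a rough sketch (not production code), a status check with the same NFTStorageClient instance from above could look something like this:
// Sketch: query NFT.Storage for the IPFS pin & Filecoin deal status of a stored CID
const checkNFTStorageStatus = async (cid: string) => {
  const status = await NFTStorageClient.status(cid);
  console.log('IPFS pin status:', status.pin);
  console.log('Filecoin deals:', status.deals);
  return status;
};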
Connecting to the contract in read mode with a public RPC:
import { ethers } from 'ethers';
//The compiled contract found in pages/api/hardhat/artifacts/contracts
import BacalhauCompiledContract from '@Contracts/BacalhauFRC721.sol/BacalhauFRC721.json';
//On-chain address of the contract
const contractAddressHyperspace = '0x773d8856dd7F78857490e5Eea65111D8d466A646';
//A public RPC Endpoint (see table from contract section)
const rpc = 'https://api.hyperspace.node.glif.io/rpc/v1';
const provider = new ethers.providers.JsonRpcProvider(rpc);
const connectedReadBacalhauContract = new ethers.Contract(
contractAddressHyperspace,
BacalhauCompiledContract.abi,
provider
);
//use the read-only connected Bacalhau Contract
connectedReadBacalhauContract.on(
// Listen for the specific event we made in our contract
'NewBacalhauFRC721NFTMinted',
(sender: string, tokenId: number, tokenURI: string) => {
//DO STUFF WHEN AN EVENT COMES IN
// eg. re-fetch NFT's, store in state and change page status
}
);
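The same read-only connection can also call the contract's view functions - here's a minimal sketch of loading the whole collection (e.g. for a gallery page):
// Fetch every NFT minted on the contract using the read-only connection
const fetchNFTCollection = async () => {
  const collection = await connectedReadBacalhauContract.getNFTCollection();
  // each entry contains the owner address, the tokenURI (ipfs:// metadata link) and the tokenId
  return collection;
};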
Connecting to the contract in write mode - this requires that the Ethereum object is being injected into the web browser by a wallet so that a user can sign for a transaction and pay for gas - which is why we're checking for a window.ethereum object.
//Typescript needs to know window is an object with potentially an ethereum value. There might be a better way to do this? Open to tips!
declare let window: any;
//The compiled contract found in pages/api/hardhat/artifacts/contracts
import BacalhauCompiledContract from '@Contracts/BacalhauFRC721.sol/BacalhauFRC721.json';
//On-chain address of the contract
const contractAddressHyperspace = '0x773d8856dd7F78857490e5Eea65111D8d466A646';
//check for the ethereum object
if (!window.ethereum) {
//ask user to install a wallet or connect
//abort this
}
// else there's a wallet provider
else {
// same function - different provider - this one has a signer - the user's connected wallet address
const provider = new ethers.providers.Web3Provider(window.ethereum);
const contract = new ethers.Contract(
contractAddressHyperspace,
BacalhauCompiledContract.abi,
provider
);
const signer = provider.getSigner();
const connectedWriteBacalhauContract = contract.connect(signer);
}
Calling the mint function using the write-connected contract.
declare let window: any;
const fetchWalletAccounts = async () => {
console.log('Fetching wallet accounts...');
return await window.ethereum //use ethers?
.request({ method: 'eth_requestAccounts' })
.then((accounts: string[]) => {
return accounts;
})
.catch((error: any) => {
if (error.code === 4001) {
// EIP-1193 userRejectedRequest error
console.log('Please connect to MetaMask.');
} else {
console.error(error);
}
});
};
const fetchChainId = async () => {
console.log('Fetching chainId...');
return await window.ethereum
.request({ method: 'eth_chainId' })
.then((chainId: string[]) => {
return chainId;
})
.catch((error: any) => {
if (error.code === 4001) {
// EIP-1193 userRejectedRequest error
console.log('Please connect to MetaMask.');
} else {
console.error(error);
}
});
};
//!! This function checks for a wallet connection WITHOUT being intrusive to the user or opening their wallet
export const checkForWalletConnection = async () => {
if (window.ethereum) {
console.log('Checking for Wallet Connection...');
return await window.ethereum
.request({ method: 'eth_accounts' })
.then(async (accounts: String[]) => {
console.log('Connected to wallet...');
// Found a user wallet
return true;
})
.catch((err: Error) => {
console.log('Error fetching wallet', err);
return false;
});
} else {
//Handle no wallet connection
return false;
}
};
//Subscribe to changes on a user's wallet
export const setWalletListeners = () => {
console.log('Setting up wallet event listeners...');
if (window.ethereum) {
// subscribe to provider events compatible with EIP-1193 standard.
window.ethereum.on('accountsChanged', (accounts: any) => {
//logic to check if disconnected accounts[] is empty
if (accounts.length < 1) {
//handle the locked wallet case
}
if (userWallet.accounts[0] !== accounts[0]) {
//user has changed address
}
});
// Subscribe to chainId change
window.ethereum.on('chainChanged', () => {
// handle changed chain case
});
} else {
//handle the no wallet case
}
};
export const changeWalletChain = async (newChainId: string) => {
console.log('Changing wallet chain...');
const provider = window.ethereum;
try {
await provider.request({
method: 'wallet_switchEthereumChain',
params: [{ chainId: newChainId }], //newChainId
});
} catch (error: any) {
alert(error.message);
}
};
//AddHyperspaceChain
export const addHyperspaceNetwork = async () => {
console.log('Adding the Hyperspace Network to Wallet...');
if (window.ethereum) {
window.ethereum
.request({
method: 'wallet_addEthereumChain',
params: [
{
chainId: '0xc45',
rpcUrls: [
'https://hyperspace.filfox.info/rpc/v0',
'https://filecoin-hyperspace.chainstacklabs.com/rpc/v0',
],
chainName: 'Filecoin Hyperspace',
nativeCurrency: {
name: 'tFIL',
symbol: 'tFIL',
decimals: 18,
},
blockExplorerUrls: [
'https://fvm.starboard.ventures/contracts/',
'https://hyperspace.filscan.io/',
'https://beryx.zondax.ch',
],
},
],
})
.then((res: XMLHttpRequestResponseType) => {
console.log('added hyperspace successfully', res);
})
.catch((err: ErrorEvent) => {
console.log('Error adding hyperspace network', err);
});
}
};
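These two helpers work well together: if wallet_switchEthereumChain fails because the wallet doesn't know the chain yet (MetaMask signals this with error code 4902), you can fall back to adding the network. A rough sketch, reusing the addHyperspaceNetwork function above:
// Sketch: switch the wallet to Hyperspace (chainId 0xc45), adding the network first if it's unknown
export const ensureHyperspaceNetwork = async () => {
  try {
    await window.ethereum.request({
      method: 'wallet_switchEthereumChain',
      params: [{ chainId: '0xc45' }],
    });
  } catch (error: any) {
    // 4902 = unrecognised chain - add it, and the wallet will prompt the user to switch
    if (error.code === 4902) {
      await addHyperspaceNetwork();
    } else {
      console.error(error);
    }
  }
};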
// Pass in the metadata return from saving to NFT.Storage
const mintNFT = async (metadata: any) => {
await connectedWriteBacalhauContract
// The name of our function in our smart contract
.mintBacalhauNFT(
userWallet.accounts[0], //users account to use
metadata.url //test ipfs address
)
.then(async (data: any) => {
console.log('CALLED CONTRACT MINT FUNCTION', data);
await data
.wait()
.then(async (tx: any) => {
console.log('tx', tx);
//CURRENTLY NOT RETURNING TX - (I use event triggering to know when this function is complete)
let tokenId = tx.events[1].args.tokenId.toString();
console.log('tokenId args', tokenId);
setStatus({
...INITIAL_TRANSACTION_STATE,
success: successMintingNFTmsg(data),
});
})
.catch((err: any) => {
console.log('ERROR', err);
setStatus({
...status,
loading: '',
error: errorMsg(err.message, 'Error minting NFT'),
});
});
})
.catch((err: any) => {
console.log('ERROR1', err);
setStatus({
...status,
loading: '',
error: errorMsg(
err && err.message ? err.message : null,
'Error minting NFT'
),
});
});
};
Bacalhau lends itself well to performing repetitive, deterministic processing jobs over data.
With ♥️