Script to mint compressed NFTs into a new Merkle tree at 10 NFTs per second
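// Example invocation (assumed file name and runner; adjust to your setup):
//   npx ts-node mint-compressed-nfts.ts \
//     --keypairFilePath /path/to/keypair.json \
//     --rpcUrl https://api.devnet.solana.com \
//     --readApiUrl https://your-read-api-provider.example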
import {
Keypair,
PublicKey,
Connection,
Transaction,
sendAndConfirmTransaction,
TransactionInstruction,
} from "@solana/web3.js";
import { createAccount, createMint, mintTo, TOKEN_PROGRAM_ID } from "@solana/spl-token";
import {
SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
createAllocTreeIx,
ValidDepthSizePair,
SPL_NOOP_PROGRAM_ID,
MerkleTreeProof,
MerkleTree,
} from "@solana/spl-account-compression";
import {
PROGRAM_ID as BUBBLEGUM_PROGRAM_ID,
MetadataArgs,
TokenProgramVersion,
TokenStandard,
createCreateTreeInstruction,
createMintToCollectionV1Instruction,
} from "@metaplex-foundation/mpl-bubblegum";
import {
PROGRAM_ID as TOKEN_METADATA_PROGRAM_ID,
CreateMetadataAccountArgsV3,
createCreateMetadataAccountV3Instruction,
createCreateMasterEditionV3Instruction,
createSetCollectionSizeInstruction,
} from "@metaplex-foundation/mpl-token-metadata";
import { Metadata, MetaplexError, Option } from "@metaplex-foundation/js";
import { program } from "commander";
import * as fs from "fs";
program
.requiredOption("-k --keypairFilePath <file_path>", 'specify keypair file location')
.requiredOption("-u --rpcUrl <rpc-url>", "specify rpc endpoint")
.requiredOption("-r --readApiUrl <read-api-url>", "specify read api url")
;
program.parse();
const opts = program.opts();
const KEYPAIR_FILE_PATH = opts.keypairFilePath
const RPC_ENDPOINT = opts.rpcUrl;
const READ_API_URL = opts.readApiUrl;
/*
* The below section is copied and mildly modified from https://github.com/solana-developers/compressed-nfts
*/
////////////////////////////////////////////////////////////////////////////////////////////////////
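// Loads a keypair from a `solana-keygen`-style JSON file (an array of secret key bytes).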
function loadKeypairFromFile(absPath: string): Keypair {
try {
if (!absPath) throw Error("No path provided");
if (!fs.existsSync(absPath)) throw Error("File does not exist.");
// load the keypair from the file
const keyfileBytes = JSON.parse(fs.readFileSync(absPath, { encoding: "utf-8" }));
// parse the loaded secretKey into a valid keypair
const keypair = Keypair.fromSecretKey(new Uint8Array(keyfileBytes));
return keypair;
} catch (err) {
throw err;
}
}
async function createTree(
connection: Connection,
payer: Keypair,
treeKeypair: Keypair,
maxDepthSizePair: ValidDepthSizePair,
canopyDepth: number = 0,
) {
console.log("Creating a new Merkle tree...");
console.log("treeAddress:", treeKeypair.publicKey.toBase58());
// derive the tree's authority (PDA), owned by Bubblegum
const [treeAuthority, _bump] = PublicKey.findProgramAddressSync(
[treeKeypair.publicKey.toBuffer()],
BUBBLEGUM_PROGRAM_ID,
);
console.log("treeAuthority:", treeAuthority.toBase58());
// allocate the tree's account on chain with the `space`
// NOTE: this will compute the space needed to store the tree on chain (and the lamports required to store it)
const allocTreeIx = await createAllocTreeIx(
connection,
treeKeypair.publicKey,
payer.publicKey,
maxDepthSizePair,
canopyDepth,
);
// create the instruction to actually create the tree
const createTreeIx = createCreateTreeInstruction(
{
payer: payer.publicKey,
treeCreator: payer.publicKey,
treeAuthority,
merkleTree: treeKeypair.publicKey,
compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
// NOTE: this is used for some on chain logging
logWrapper: SPL_NOOP_PROGRAM_ID,
},
{
maxBufferSize: maxDepthSizePair.maxBufferSize,
maxDepth: maxDepthSizePair.maxDepth,
public: false,
},
BUBBLEGUM_PROGRAM_ID,
);
try {
// create and send the transaction to initialize the tree
const tx = new Transaction().add(allocTreeIx).add(createTreeIx);
tx.feePayer = payer.publicKey;
// send the transaction
const txSignature = await sendAndConfirmTransaction(
connection,
tx,
// the new Merkle tree account (`treeKeypair`) and the `payer` must BOTH sign
[treeKeypair, payer],
{
commitment: "confirmed",
skipPreflight: true,
},
);
console.log("\nMerkle tree created successfully!");
// return useful info
return { treeAuthority, treeAddress: treeKeypair.publicKey };
} catch (err: any) {
console.error("\nFailed to create merkle tree:", err);
throw err;
}
}
async function createCollection(
connection: Connection,
payer: Keypair,
metadataV3: CreateMetadataAccountArgsV3,
) {
// create and initialize the SPL token mint
console.log("Creating the collection's mint...");
const mint = await createMint(
connection,
payer,
// mint authority
payer.publicKey,
// freeze authority
payer.publicKey,
// decimals - use `0` for NFTs since they are non-fungible
0,
);
console.log("Mint address:", mint.toBase58());
// create the token account
console.log("Creating a token account...");
const tokenAccount = await createAccount(
connection,
payer,
mint,
payer.publicKey,
undefined,
{
skipPreflight: true
}
);
console.log("Token account:", tokenAccount.toBase58());
// mint a single token for the collection NFT
console.log("Minting 1 token for the collection...");
const mintSig = await mintTo(
connection,
payer,
mint,
tokenAccount,
payer,
// mint exactly 1 token
1,
// no `multiSigners`
[],
{
skipPreflight: true
},
TOKEN_PROGRAM_ID,
);
// derive the PDA for the metadata account
const [metadataAccount, _bump] = PublicKey.findProgramAddressSync(
[Buffer.from("metadata", "utf8"), TOKEN_METADATA_PROGRAM_ID.toBuffer(), mint.toBuffer()],
TOKEN_METADATA_PROGRAM_ID,
);
console.log("Metadata account:", metadataAccount.toBase58());
// create an instruction to create the metadata account
const createMetadataIx = createCreateMetadataAccountV3Instruction(
{
metadata: metadataAccount,
mint: mint,
mintAuthority: payer.publicKey,
payer: payer.publicKey,
updateAuthority: payer.publicKey,
},
{
createMetadataAccountArgsV3: metadataV3,
},
);
// derive the PDA for the master edition account
const [masterEditionAccount, _bump2] = PublicKey.findProgramAddressSync(
[
Buffer.from("metadata", "utf8"),
TOKEN_METADATA_PROGRAM_ID.toBuffer(),
mint.toBuffer(),
Buffer.from("edition", "utf8"),
],
TOKEN_METADATA_PROGRAM_ID,
);
console.log("Master edition account:", masterEditionAccount.toBase58());
// create an instruction to create the master edition account
const createMasterEditionIx = createCreateMasterEditionV3Instruction(
{
edition: masterEditionAccount,
mint: mint,
mintAuthority: payer.publicKey,
payer: payer.publicKey,
updateAuthority: payer.publicKey,
metadata: metadataAccount,
},
{
createMasterEditionArgs: {
maxSupply: 0,
},
},
);
// create the collection size instruction
const collectionSizeIX = createSetCollectionSizeInstruction(
{
collectionMetadata: metadataAccount,
collectionAuthority: payer.publicKey,
collectionMint: mint,
},
{
setCollectionSizeArgs: { size: 50 },
},
);
try {
// construct the transaction with our instructions, making the `payer` the `feePayer`
const tx = new Transaction()
.add(createMetadataIx)
.add(createMasterEditionIx)
.add(collectionSizeIX);
tx.feePayer = payer.publicKey;
// send the transaction to the cluster
const txSignature = await sendAndConfirmTransaction(connection, tx, [payer], {
commitment: "confirmed",
skipPreflight: true,
});
console.log("\nCollection successfully created!");
} catch (err) {
console.error("\nFailed to create collection:", err);
throw err;
}
// return all the accounts
return { mint, tokenAccount, metadataAccount, masterEditionAccount };
}
async function mintCompressedNFT(
connection: Connection,
payer: Keypair,
treeAddress: PublicKey,
collectionMint: PublicKey,
collectionMetadata: PublicKey,
collectionMasterEditionAccount: PublicKey,
compressedNFTMetadata: MetadataArgs,
receiverAddress?: PublicKey,
) {
// derive the tree's authority (PDA), owned by Bubblegum
const [treeAuthority, _bump] = PublicKey.findProgramAddressSync(
[treeAddress.toBuffer()],
BUBBLEGUM_PROGRAM_ID,
);
// derive a PDA (owned by Bubblegum) to act as the signer of the compressed minting
const [bubblegumSigner, _bump2] = PublicKey.findProgramAddressSync(
// `collection_cpi` is a custom prefix required by the Bubblegum program
[Buffer.from("collection_cpi", "utf8")],
BUBBLEGUM_PROGRAM_ID,
);
// create an array of instructions, to mint multiple compressed NFTs at once
const mintIxs: TransactionInstruction[] = [];
/*
Add a single mint instruction
---
You could add multiple mint instructions to the same transaction, as long as
the transaction stays within the byte size limit
*/
mintIxs.push(
createMintToCollectionV1Instruction(
{
payer: payer.publicKey,
merkleTree: treeAddress,
treeAuthority,
treeDelegate: payer.publicKey,
// set the receiver of the NFT
leafOwner: receiverAddress || payer.publicKey,
// set a delegated authority over this NFT
leafDelegate: payer.publicKey,
/*
You can set any delegate address at mint; it would normally be
the same as `leafOwner`
NOTE: the delegate is automatically cleared when the NFT is transferred
---
in this case, we are setting the payer as the delegate
*/
// collection details
collectionAuthority: payer.publicKey,
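// NOTE: per the upstream example, passing the Bubblegum program id here indicates
// that no collection authority record PDA is being used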
collectionAuthorityRecordPda: BUBBLEGUM_PROGRAM_ID,
collectionMint: collectionMint,
collectionMetadata: collectionMetadata,
editionAccount: collectionMasterEditionAccount,
// other accounts
compressionProgram: SPL_ACCOUNT_COMPRESSION_PROGRAM_ID,
logWrapper: SPL_NOOP_PROGRAM_ID,
bubblegumSigner: bubblegumSigner,
tokenMetadataProgram: TOKEN_METADATA_PROGRAM_ID,
},
{
metadataArgs: Object.assign(compressedNFTMetadata, {
collection: { key: collectionMint, verified: false },
}),
},
),
);
try {
// construct the transaction with our instructions, making the `payer` the `feePayer`
const tx = new Transaction().add(...mintIxs);
tx.feePayer = payer.publicKey;
// send the transaction to the cluster
const txSignature = await sendAndConfirmTransaction(connection, tx, [payer], {
commitment: "confirmed",
skipPreflight: true,
});
console.log("\nSuccessfully minted the compressed NFT!");
return txSignature;
} catch (err) {
console.error("\nFailed to mint compressed NFT:", err);
throw err;
}
}
type JsonRpcParams<ReadApiMethodParams> = {
method: string;
id?: string;
params: ReadApiMethodParams;
};
type JsonRpcOutput<ReadApiJsonOutput> = {
result: ReadApiJsonOutput;
};
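// Minimal JSON-RPC helper for the Metaplex Read API (DAS-style methods such as
// `getAssetsByGroup` and `getAssetProof`); assumes READ_API_URL points at an RPC
// provider that supports these methods, since they are not part of core Solana JSON-RPC.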
const callReadApi = async <ReadApiMethodParams, ReadApiJsonOutput>(
jsonRpcParams: JsonRpcParams<ReadApiMethodParams>,
): Promise<JsonRpcOutput<ReadApiJsonOutput>> => {
const response = await fetch(READ_API_URL, {
method: "POST",
headers: {
"Content-Type": "application/json",
},
body: JSON.stringify({
jsonrpc: "2.0",
method: jsonRpcParams.method,
id: jsonRpcParams.id ?? "rpd-op-123",
params: jsonRpcParams.params,
}),
});
return await response.json();
};
type ReadApiPropGroupKey = "collection";
type ReadApiPropSortBy = "created" | "updated" | "recent_action";
type ReadApiPropSortDirection = "asc" | "desc";
type ReadApiParamAssetSortBy = {
sortBy: ReadApiPropSortBy;
sortDirection: ReadApiPropSortDirection;
};
type GetAssetsByGroupRpcInput = {
groupKey: ReadApiPropGroupKey;
groupValue: string;
page?: Option<number>;
limit?: Option<number>;
/* assetId to search before */
before?: Option<string>;
/* assetId to search after */
after?: Option<string>;
sortBy?: Option<ReadApiParamAssetSortBy>;
};
type ReadApiAssetInterface =
| "V1_NFT"
| "V1_PRINT"
| "LEGACY_NFT"
| "V2_NFT"
| "FungibleAsset"
| "Custom"
| "Identity"
| "Executable"
| "ProgrammableNFT";
type ReadApiOwnershipMetadata = {
frozen: boolean;
delegated: boolean;
delegate: string | null;
owner: string;
ownership_model: "single" | "token";
};
type ReadApiAuthorityScope = "full";
type ReadApiAssetAuthority = {
address: string;
scopes: ReadApiAuthorityScope[];
};
type ReadApiAssetContent = {
json_uri: string;
metadata: Metadata["json"];
};
type ReadApiAssetRoyaltyMetadata = {
primary_sale_happened: boolean;
basis_points: number;
};
type ReadApiAssetSupplyMetadata = {
edition_nonce: number;
print_current_supply: number;
print_max_supply: number;
};
type ReadApiAssetGrouping = {
group_key: ReadApiPropGroupKey;
group_value: string;
};
type ReadApiCompressionMetadata = {
eligible: boolean;
compressed: boolean;
data_hash: string;
creator_hash: string;
asset_hash: string;
tree: string;
seq: number;
leaf_id: number;
};
type ReadApiAsset = {
/**
* The asset Id
*/
id: string;
interface: ReadApiAssetInterface;
ownership: ReadApiOwnershipMetadata;
mutable: boolean;
authorities: Array<ReadApiAssetAuthority>;
content: ReadApiAssetContent;
royalty: ReadApiAssetRoyaltyMetadata;
supply: ReadApiAssetSupplyMetadata;
creators: Metadata["creators"];
grouping: Array<ReadApiAssetGrouping>;
compression: ReadApiCompressionMetadata;
};
type ReadApiRpcResponseError = {
error: string;
id: string;
};
type ReadApiAssetList = {
total: number;
limit: number;
/**
* listing of individual assets, and their associated metadata
*/
items: Array<ReadApiAsset>;
/**
* `page` is only provided when using page-based pagination, as opposed
* to asset id before/after based pagination
*/
page: Option<number>;
/**
* asset Id searching before
*/
before: Option<string>;
/**
* asset Id searching after
*/
after: Option<string>;
/**
* listing of errors provided by the ReadApi RPC
*/
errors: Option<ReadApiRpcResponseError[]>;
};
class ReadApiError extends MetaplexError {
constructor(message: string, cause?: Error) {
super({ key: "", title: message, problem: "", solution: "", source: "rpc", options: { cause } });
}
}
const getAssetsByGroup = async ({
groupKey,
groupValue,
page,
limit,
sortBy,
before,
after,
}: GetAssetsByGroupRpcInput): Promise<ReadApiAssetList> => {
// `page` cannot be supplied with `before` or `after`
if (typeof page == "number" && (before || after))
throw new ReadApiError(
"Pagination Error. Only one pagination parameter supported per query.",
);
// a pagination method MUST be selected (either `page`, or `before`/`after`)
if (typeof page != "number" && !before && !after)
throw new ReadApiError("Pagination Error. No Pagination Method Selected.");
const { result } = await callReadApi<GetAssetsByGroupRpcInput, ReadApiAssetList>({
method: "getAssetsByGroup",
params: {
groupKey,
groupValue,
after: after ?? null,
before: before ?? null,
limit: limit ?? null,
page: page ?? 0,
sortBy: sortBy ?? null,
},
});
if (!result) throw new ReadApiError("No results returned");
return result;
}
type GetAssetProofRpcInput = {
id: string;
};
type GetAssetProofRpcResponse = {
root: string;
proof: string[];
node_index: number;
leaf: string;
tree_id: string;
};
// Asset id can be calculated via Bubblegum#getLeafAssetId
// It is a PDA with the following seeds: ["asset", tree, leafIndex]
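// For reference, the derivation looks roughly like this (sketch only; `leafIndexAsU64LeBytes`
// is a placeholder for the 8-byte little-endian encoding of the leaf index):
//   PublicKey.findProgramAddressSync(
//     [Buffer.from("asset", "utf8"), treeAddress.toBuffer(), leafIndexAsU64LeBytes],
//     BUBBLEGUM_PROGRAM_ID,
//   )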
const getAssetProof = async (assetId: PublicKey): Promise<GetAssetProofRpcResponse> => {
const { result: proof } = await callReadApi<
GetAssetProofRpcInput,
GetAssetProofRpcResponse
>({
method: "getAssetProof",
params: {
id: assetId.toBase58(),
},
});
if (!proof) throw new ReadApiError("No asset proof returned");
return proof;
}
////////////////////////////////////////////////////////////////////////////////////////////////////
const sleep = async (timeMs: number) => {
return new Promise((resolve) => {
setTimeout(resolve, timeMs);
});
}
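// Wraps an async fn so that at most `maxWaitGroupSize` invocations of it run concurrently;
// extra callers poll every `sleepTimeMs` until a slot frees up.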
const waitGroupFn = <T>(
fn: (...args: any[]) => Promise<T>,
sleepTimeMs: number,
maxWaitGroupSize: number
): (...args: any[]) => Promise<T> => {
let waitGroupSize = 0;
return async function (...args: any[]): Promise<T> {
const executeFn = async () => {
waitGroupSize++;
try {
return await fn(...args)
} finally {
waitGroupSize--;
}
};
while (true) {
if (waitGroupSize >= maxWaitGroupSize) {
await sleep(sleepTimeMs);
} else {
return await executeFn();
}
}
};
}
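// Wraps an async fn so that consecutive invocations start at least `minInterval` ms apart
// (a simple client-side rate limiter; callers that arrive too early sleep and retry).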
const rateControlFn = <T>(
fn: (...args: any[]) => Promise<T>,
minInterval: number
) => {
let lastCall: number = 0;
return async function (...args: any[]): Promise<T> {
const executeFn = async () => {
lastCall = Date.now();
return await fn(...args)
};
while (true) {
const timeSinceLastCall = Date.now() - lastCall;
if (timeSinceLastCall >= minInterval) {
return await executeFn();
} else {
await sleep(minInterval - timeSinceLastCall);
}
}
};
}
const MAX_REQUESTS_PER_SECOND = 10
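// Throttle mint calls to MAX_REQUESTS_PER_SECOND and cap both mints and proof fetches
// at 1,000 calls in flight at once.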
const rateControlMint = rateControlFn(mintCompressedNFT, 1_000 / MAX_REQUESTS_PER_SECOND);
const wgRateControlMint = waitGroupFn(rateControlMint, 1_000, 1_000)
const wgGetAssetProof = waitGroupFn(getAssetProof, 1_000, 1_000)
const main = async () => {
const treeKeypair = Keypair.generate();
const connection = new Connection(RPC_ENDPOINT, "confirmed");
const ownerKeypair = loadKeypairFromFile(KEYPAIR_FILE_PATH)
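// maxDepth 14 allows up to 2^14 = 16,384 leaves; a canopy of depth 11 (maxDepth - 3)
// keeps the upper proof nodes on chain, so clients only supply 3 proof nodes per leaf.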
const maxDepthSizePair: ValidDepthSizePair = {
maxDepth: 14,
maxBufferSize: 64,
};
const canopyDepth = maxDepthSizePair.maxDepth - 3;
const tree = await createTree(
connection,
ownerKeypair,
treeKeypair,
maxDepthSizePair,
canopyDepth
);
const treeAddress = tree.treeAddress.toBase58()
const collectionMetadataV3: CreateMetadataAccountArgsV3 = {
data: {
name: "Test Collection",
symbol: "cTEST",
uri: "https://arweave.net/fwbGMeKWfl4rBmXdyKkFjMy8SEDEiSWo-00xAN8-KRA",
sellerFeeBasisPoints: 0,
creators: [
{
address: ownerKeypair.publicKey,
verified: false,
share: 100,
},
],
collection: null,
uses: null,
},
isMutable: false,
collectionDetails: null,
};
// create a full token mint and initialize the collection (with the `payer` as the authority)
const collection = await createCollection(connection, ownerKeypair, collectionMetadataV3);
const collectionMint = collection.mint
const [collectionMetadataAccount, _bump] = PublicKey.findProgramAddressSync(
[Buffer.from("metadata", "utf8"), TOKEN_METADATA_PROGRAM_ID.toBuffer(), collectionMint.toBuffer()],
TOKEN_METADATA_PROGRAM_ID,
);
const [collectionMasterEditionAccount, _bump2] = PublicKey.findProgramAddressSync(
[
Buffer.from("metadata", "utf8"),
TOKEN_METADATA_PROGRAM_ID.toBuffer(),
collectionMint.toBuffer(),
Buffer.from("edition", "utf8"),
],
TOKEN_METADATA_PROGRAM_ID,
);
const compressedNFTMetadata: MetadataArgs = {
name: "Test Compressed NFT",
symbol: "cTEST",
uri: "https://arweave.net/fwbGMeKWfl4rBmXdyKkFjMy8SEDEiSWo-00xAN8-KRA",
creators: [
{
address: ownerKeypair.publicKey,
verified: false,
share: 100,
},
],
editionNonce: 0,
uses: null,
collection: null,
primarySaleHappened: true,
sellerFeeBasisPoints: 0,
isMutable: false,
tokenProgramVersion: TokenProgramVersion.Original,
tokenStandard: TokenStandard.NonFungible,
};
let number = 0;
const promises = Array.from(Array(10_000).keys()).map(async () => {
while (true) {
try {
await wgRateControlMint(
connection,
ownerKeypair,
new PublicKey(treeAddress),
collectionMint,
collectionMetadataAccount,
collectionMasterEditionAccount,
compressedNFTMetadata,
Keypair.generate().publicKey
)
break
} catch (e) {
console.log('error minting, retrying in one second', e)
await sleep(1_000)
}
}
number++
console.log('finished minting', number)
})
await Promise.all(promises)
console.log('done minting all NFTs')
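// NOTE: the Read API is served by an off-chain indexer, so freshly minted assets can
// take a little while to appear in query results.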
let assets: ReadApiAsset[] = [];
let page = 1
const limit = 1000
while (true) {
const pagedAssets = await getAssetsByGroup(
{
groupKey: "collection",
groupValue: collectionMint.toBase58(),
page: page,
limit
}
)
assets.push(...pagedAssets.items)
if (pagedAssets.items.length < limit) {
break
}
page++
}
const allTreeAssets = assets.filter(asset => asset.compression.tree === treeAddress)
console.log('total assets in tree', allTreeAssets.length)
let totalFail = 0;
let totalEmpty = 0;
let finished = 0;
let totalFetchFail = 0;
let success = 0;
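// For each asset minted into our tree, fetch its Merkle proof from the Read API and
// verify it locally against the root reported in the same response.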
const checkPromises = allTreeAssets.map(async (asset) => {
let assetProof: GetAssetProofRpcResponse
try {
assetProof = await wgGetAssetProof(new PublicKey(asset.id))
} catch (e) {
totalFetchFail++
return
} finally {
finished++
}
if (assetProof.proof.length === 0 || assetProof.proof[0] === '') {
totalEmpty++
return
}
const merkleTreeProof: MerkleTreeProof = {
leafIndex: asset.compression.leaf_id,
leaf: new PublicKey(assetProof.leaf).toBuffer(),
root: new PublicKey(assetProof.root).toBuffer(),
proof: assetProof.proof.map((node: string) =>
new PublicKey(node).toBuffer()
),
};
const rpcRoot = new PublicKey(assetProof.root).toBuffer();
const isVerified = MerkleTree.verify(rpcRoot, merkleTreeProof, false);
if (!isVerified) {
totalFail++
} else {
success++
}
})
await Promise.all(checkPromises)
console.log(totalFail, 'out of', finished, 'proofs failed verification')
console.log(totalEmpty, 'out of', finished, 'proofs were empty')
console.log(totalFetchFail, 'out of', finished, 'proofs failed to fetch')
console.log(success, 'out of', finished, 'proofs were valid proofs')
}
main()