Skip to content

Commit

Permalink
Merge branch 'main' of github.com:scaffold-eth/create-eth into extens…
Browse files Browse the repository at this point in the history
…ion-github-url
  • Loading branch information
rin-st committed Dec 27, 2024
2 parents 5666726 + 827350e commit c4080c3
Show file tree
Hide file tree
Showing 9 changed files with 96 additions and 112 deletions.
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,13 @@
# create-eth

## 0.0.65

### Patch Changes

- 8c4e925: foundry: avoid extra compilation (https://github.com/scaffold-eth/scaffold-eth-2/pull/1020)
- 38d85c0: up burner-connector version (https://github.com/scaffold-eth/scaffold-eth-2/pull/1021)
- aed3345: cli: fix merge package.json files for extensions with both solidity-frameworks

## 0.0.64

### Patch Changes
Expand Down
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "create-eth",
"version": "0.0.64",
"version": "0.0.65",
"description": "Create a Scaffold-ETH-2 app",
"repository": {
"type": "git",
Expand Down
6 changes: 6 additions & 0 deletions src/extensions.json
Original file line number Diff line number Diff line change
Expand Up @@ -40,5 +40,11 @@
"description": "This extension shows how to use on-chain randomness using RANDAO for truly on-chain unpredictable random sources.",
"repository": "https://github.com/scaffold-eth/create-eth-extensions",
"branch": "randao"
},
{
"extensionFlagValue": "erc-721",
"description": "This extension introduces an ERC-721 token contract and demonstrates how to use it, including getting the total supply and holder balance, listing all NFTs from the collection and NFTs from the connected address, and how to transfer NFTs.",
"repository": "https://github.com/scaffold-eth/create-eth-extensions",
"branch": "erc-721"
}
]
28 changes: 23 additions & 5 deletions src/tasks/copy-template-files.ts
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,19 @@ const copyBaseFiles = async (basePath: string, targetDir: string, { dev: isDev }
}
};

/**
 * Returns true when `path` belongs to a solidity framework the user did NOT
 * select, i.e. the file should be skipped while copying extension files.
 * A `null` solidityFramework means nothing was selected, so files from
 * every framework are treated as unselected.
 */
const isUnselectedSolidityFrameworkFile = ({
  path,
  solidityFramework,
}: {
  path: string;
  solidityFramework: SolidityFramework | null;
}) => {
  const allFrameworks = [SOLIDITY_FRAMEWORKS.FOUNDRY, SOLIDITY_FRAMEWORKS.HARDHAT];
  // Keep only the frameworks the user did not pick, then check whether the
  // path mentions any of them (unanchored match, same as the original regex).
  return allFrameworks
    .filter(candidate => candidate !== solidityFramework)
    .some(candidate => new RegExp(candidate).test(path));
};

const copyExtensionFiles = async (
{ dev: isDev, solidityFramework }: Options,
extensionPath: string,
Expand Down Expand Up @@ -101,11 +114,7 @@ const copyExtensionFiles = async (
const isTemplate = isTemplateRegex.test(path);
const isPackageJson = isPackageJsonRegex.test(path);

const unselectedSolidityFrameworks = [SOLIDITY_FRAMEWORKS.FOUNDRY, SOLIDITY_FRAMEWORKS.HARDHAT].filter(
sf => sf !== solidityFramework,
);
const isUnselectedSolidityFrameworksRegexes = unselectedSolidityFrameworks.map(sf => new RegExp(`${sf}$`));
const isUnselectedSolidityFramework = isUnselectedSolidityFrameworksRegexes.some(sfregex => sfregex.test(path));
const isUnselectedSolidityFramework = isUnselectedSolidityFrameworkFile({ path, solidityFramework });

const shouldSkip = isArgs || isTemplate || isPackageJson || isUnselectedSolidityFramework;

Expand All @@ -116,6 +125,15 @@ const copyExtensionFiles = async (
// copy each package's package.json
const extensionPackages = fs.readdirSync(extensionPackagesPath);
extensionPackages.forEach(packageName => {
const isUnselectedSolidityFramework = isUnselectedSolidityFrameworkFile({
path: path.join(targetDir, "packages", packageName, "package.json"),
solidityFramework,
});

if (isUnselectedSolidityFramework) {
return;
}

mergePackageJson(
path.join(targetDir, "packages", packageName, "package.json"),
path.join(extensionPath, "packages", packageName, "package.json"),
Expand Down
2 changes: 1 addition & 1 deletion templates/base/packages/nextjs/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
"@uniswap/sdk-core": "~5.8.2",
"@uniswap/v2-sdk": "~4.6.1",
"blo": "~1.2.0",
"burner-connector": "~0.0.8",
"burner-connector": "0.0.9",
"daisyui": "4.12.10",
"next": "~14.2.11",
"next-nprogress-bar": "~2.3.13",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
.PHONY: build deploy generate-abis verify-keystore account chain compile deploy-verify flatten fork format lint test verify

import { withDefaults } from "../../../../utils.js";

const content = ({ recipes, postDeployRecipeToRun }) => `.PHONY: build deploy generate-abis verify-keystore account chain compile flatten fork format lint test verify
DEPLOY_SCRIPT ?= script/Deploy.s.sol
Expand All @@ -14,11 +17,7 @@ chain: setup-anvil-wallet
# Start a fork
fork: setup-anvil-wallet
anvil --fork-url ${FORK_URL} --chain-id 31337

# Build the project
build:
forge build --via-ir --build-info --build-info-path out/build-info/
anvil --fork-url \${FORK_URL} --chain-id 31337
# Deploy the contracts
deploy:
Expand All @@ -28,16 +27,16 @@ deploy:
fi
@if [ "$(RPC_URL)" = "localhost" ]; then \
if [ "$(ETH_KEYSTORE_ACCOUNT)" = "scaffold-eth-default" ]; then \
forge script $(DEPLOY_SCRIPT) --rpc-url localhost --password localhost --broadcast --legacy --ffi; \
forge script $(DEPLOY_SCRIPT) --rpc-url localhost --password localhost --broadcast --via-ir --legacy --ffi; \
else \
forge script $(DEPLOY_SCRIPT) --rpc-url localhost --broadcast --legacy --ffi; \
forge script $(DEPLOY_SCRIPT) --rpc-url localhost --broadcast --legacy --via-ir --ffi; \
fi \
else \
forge script $(DEPLOY_SCRIPT) --rpc-url $(RPC_URL) --broadcast --legacy --ffi; \
forge script $(DEPLOY_SCRIPT) --rpc-url $(RPC_URL) --broadcast --legacy --via-ir --ffi; \
fi
# Build and deploy target
build-and-deploy: build deploy generate-abis
# Deploy and generate ABIs
deploy-and-generate-abis: deploy generate-abis ${postDeployRecipeToRun.filter(Boolean).join(" ")}
# Generate TypeScript ABIs
generate-abis:
Expand All @@ -61,29 +60,12 @@ account-generate:
# Import an existing account
account-import:
@cast wallet import ${ACCOUNT_NAME} --interactive
@cast wallet import \${ACCOUNT_NAME} --interactive
# Compile contracts
compile:
forge compile
# Deploy and verify
deploy-verify:
@if [ ! -f "$(DEPLOY_SCRIPT)" ]; then \
echo "Error: Deploy script '$(DEPLOY_SCRIPT)' not found"; \
exit 1; \
fi
@if [ "$(RPC_URL)" = "localhost" ]; then \
if [ "$(ETH_KEYSTORE_ACCOUNT)" = "scaffold-eth-default" ]; then \
forge script $(DEPLOY_SCRIPT) --rpc-url localhost --password localhost --broadcast --legacy --ffi --verify; \
else \
forge script $(DEPLOY_SCRIPT) --rpc-url localhost --broadcast --legacy --ffi --verify; \
fi \
else \
forge script $(DEPLOY_SCRIPT) --rpc-url $(RPC_URL) --broadcast --legacy --ffi --verify; \
fi
node scripts-js/generateTsAbis.js

# Flatten contracts
flatten:
forge flatten
Expand All @@ -100,5 +82,10 @@ lint:
verify:
forge script script/VerifyAll.s.sol --ffi --rpc-url $(RPC_URL)
build-and-verify: build verify
${recipes.filter(Boolean).join("\n")}`


export default withDefaults(content, {
recipes: ``,
postDeployRecipeToRun: ``,
});
31 changes: 15 additions & 16 deletions templates/solidity-frameworks/foundry/packages/foundry/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -3,21 +3,20 @@
"version": "0.0.1",
"type": "module",
"scripts": {
"account": "make account",
"account:generate": "make account-generate ACCOUNT_NAME=${1:-scaffold-eth-custom}",
"account:import": "make account-import ACCOUNT_NAME=${1:-scaffold-eth-custom}",
"chain": "make chain",
"compile": "make compile",
"deploy": "node scripts-js/parseArgs.js",
"deploy:verify": "node scripts/parseArgs.js --verify",
"flatten": "make flatten",
"fork": "make fork FORK_URL=${1:-mainnet}",
"format": "make format",
"lint": "make lint",
"postinstall": "shx cp -n .env.example .env",
"test": "forge test",
"verify": "make build-and-verify RPC_URL=${1:-localhost}",
"verify-keystore": "make verify-keystore"
"verify-keystore": "make verify-keystore",
"account": "make account",
"account:generate": "make account-generate ACCOUNT_NAME=${1:-scaffold-eth-custom}",
"account:import": "make account-import ACCOUNT_NAME=${1:-scaffold-eth-custom}",
"chain": "make chain",
"compile": "make compile",
"deploy": "node scripts-js/parseArgs.js",
"flatten": "make flatten",
"fork": "make fork FORK_URL=${1:-mainnet}",
"format": "make format",
"lint": "make lint",
"test": "forge test",
"verify": "make verify RPC_URL=${1:-localhost}",
"postinstall": "shx cp -n .env.example .env"
},
"dependencies": {
"dotenv": "~16.3.1",
Expand All @@ -28,6 +27,6 @@
"toml": "~3.0.0"
},
"devDependencies": {
"shx": "~0.3.4"
"shx": "^0.3.4"
}
}
Original file line number Diff line number Diff line change
@@ -1,11 +1,4 @@
import {
readdirSync,
statSync,
readFileSync,
existsSync,
mkdirSync,
writeFileSync,
} from "fs";
import { readdirSync, statSync, readFileSync, existsSync, mkdirSync, writeFileSync } from "fs";
import { join, dirname } from "path";
import { fileURLToPath } from "url";
import { format } from "prettier";
Expand All @@ -19,6 +12,10 @@ const generatedContractComment = `
*/`;

function getDirectories(path) {
if (!existsSync(path)) {
return [];
}

return readdirSync(path).filter(function (file) {
return statSync(join(path, file)).isDirectory();
});
Expand Down Expand Up @@ -47,12 +44,7 @@ function getDeploymentHistory(broadcastPath) {

// Sort files to process them in chronological order
const runFiles = files
.filter(
(file) =>
file.startsWith("run-") &&
file.endsWith(".json") &&
!file.includes("run-latest")
)
.filter(file => file.startsWith("run-") && file.endsWith(".json") && !file.includes("run-latest"))
.sort((a, b) => {
// Extract run numbers and compare them
const runA = parseInt(a.match(/run-(\d+)/)?.[1] || "0");
Expand Down Expand Up @@ -80,17 +72,11 @@ function getDeploymentHistory(broadcastPath) {
}

function getArtifactOfContract(contractName) {
const current_path_to_artifacts = join(
__dirname,
"..",
`out/${contractName}.sol`
);
const current_path_to_artifacts = join(__dirname, "..", `out/${contractName}.sol`);

if (!existsSync(current_path_to_artifacts)) return null;

const artifactJson = JSON.parse(
readFileSync(`${current_path_to_artifacts}/${contractName}.json`)
);
const artifactJson = JSON.parse(readFileSync(`${current_path_to_artifacts}/${contractName}.json`));

return artifactJson;
}
Expand All @@ -101,9 +87,7 @@ function getInheritedFromContracts(artifact) {
for (const astNode of artifact.ast.nodes) {
if (astNode.nodeType == "ContractDefinition") {
if (astNode.baseContracts.length > 0) {
inheritedFromContracts = astNode.baseContracts.map(
({ baseName }) => baseName.name
);
inheritedFromContracts = astNode.baseContracts.map(({ baseName }) => baseName.name);
}
}
}
Expand Down Expand Up @@ -135,25 +119,20 @@ function processAllDeployments(broadcastPath) {
const scriptFolders = getDirectories(broadcastPath);
const allDeployments = new Map();

scriptFolders.forEach((scriptFolder) => {
scriptFolders.forEach(scriptFolder => {
const scriptPath = join(broadcastPath, scriptFolder);
const chainFolders = getDirectories(scriptPath);

chainFolders.forEach((chainId) => {
chainFolders.forEach(chainId => {
const chainPath = join(scriptPath, chainId);
const deploymentHistory = getDeploymentHistory(chainPath);

deploymentHistory.forEach((deployment) => {
const timestamp = parseInt(
deployment.deploymentFile.match(/run-(\d+)/)?.[1] || "0"
);
deploymentHistory.forEach(deployment => {
const timestamp = parseInt(deployment.deploymentFile.match(/run-(\d+)/)?.[1] || "0");
const key = `${chainId}-${deployment.contractName}`;

// Only update if this deployment is newer
if (
!allDeployments.has(key) ||
timestamp > allDeployments.get(key).timestamp
) {
if (!allDeployments.has(key) || timestamp > allDeployments.get(key).timestamp) {
allDeployments.set(key, {
...deployment,
timestamp,
Expand All @@ -167,7 +146,7 @@ function processAllDeployments(broadcastPath) {

const allContracts = {};

allDeployments.forEach((deployment) => {
allDeployments.forEach(deployment => {
const { chainId, contractName } = deployment;
const artifact = getArtifactOfContract(contractName);

Expand Down Expand Up @@ -197,19 +176,15 @@ function main() {
const deployments = {};

// Load existing deployments from deployments directory
Deploymentchains.forEach((chain) => {
Deploymentchains.forEach(chain => {
if (!chain.endsWith(".json")) return;
chain = chain.slice(0, -5);
var deploymentObject = JSON.parse(
readFileSync(`${current_path_to_deployments}/${chain}.json`)
);
var deploymentObject = JSON.parse(readFileSync(`${current_path_to_deployments}/${chain}.json`));
deployments[chain] = deploymentObject;
});

// Process all deployments from all script folders
const allGeneratedContracts = processAllDeployments(
current_path_to_broadcast
);
const allGeneratedContracts = processAllDeployments(current_path_to_broadcast);

// Update contract keys based on deployments if they exist
Object.entries(allGeneratedContracts).forEach(([chainId, contracts]) => {
Expand All @@ -231,19 +206,12 @@ function main() {
}

// Generate the deployedContracts content
const fileContent = Object.entries(allGeneratedContracts).reduce(
(content, [chainId, chainConfig]) => {
return `${content}${parseInt(chainId).toFixed(0)}:${JSON.stringify(
chainConfig,
null,
2
)},`;
},
""
);
const fileContent = Object.entries(allGeneratedContracts).reduce((content, [chainId, chainConfig]) => {
return `${content}${parseInt(chainId).toFixed(0)}:${JSON.stringify(chainConfig, null, 2)},`;
}, "");

// Write the files
const fileTemplate = (importPath) => `
const fileTemplate = importPath => `
${generatedContractComment}
import { GenericContractsDeclaration } from "${importPath}";
Expand All @@ -256,12 +224,10 @@ function main() {
`${NEXTJS_TARGET_DIR}deployedContracts.ts`,
format(fileTemplate("~~/utils/scaffold-eth/contract"), {
parser: "typescript",
})
}),
);

console.log(
`📝 Updated TypeScript contract definition file on ${NEXTJS_TARGET_DIR}deployedContracts.ts`
);
console.log(`📝 Updated TypeScript contract definition file on ${NEXTJS_TARGET_DIR}deployedContracts.ts`);
}

try {
Expand Down
Loading

0 comments on commit c4080c3

Please sign in to comment.