import type { CompileCache } from "./cache.js";
import type { DependencyGraphImplementation } from "./dependency-graph.js";
import type { Artifact } from "../../../../types/artifacts.js";
import type {
  SolidityCompilerConfig,
  SolcSolidityCompilerConfig,
  SolidityConfig,
} from "../../../../types/config.js";
import type { HookManager } from "../../../../types/hooks.js";
import type {
  SolidityBuildSystem,
  BuildOptions,
  CompilationJobCreationError,
  FileBuildResult,
  GetCompilationJobsOptions,
  CompileBuildInfoOptions,
  RunCompilationJobOptions,
  GetCompilationJobsResult,
  EmitArtifactsResult,
  RunCompilationJobResult,
  BuildScope,
  CacheHitInfo,
} from "../../../../types/solidity/build-system.js";
import type {
  ToolVersions,
  CompilationJob,
  Compiler,
  CompilerOutput,
  CompilerOutputError,
  DependencyGraph,
  SolidityBuildInfo,
} from "../../../../types/solidity.js";

import os from "node:os";
import path from "node:path";
import { styleText } from "node:util";

import {
  assertHardhatInvariant,
  HardhatError,
} from "@nomicfoundation/hardhat-errors";
import { createDebug } from "@nomicfoundation/hardhat-utils/debug";
import {
  exists,
  ensureDir,
  getAllDirectoriesMatching,
  getAllFilesMatching,
  move,
  readJsonFile,
  remove,
  writeJsonFile,
  writeJsonFileAsStream,
  writeUtf8File,
  readdirOrEmpty,
} from "@nomicfoundation/hardhat-utils/fs";
import { shortenPath } from "@nomicfoundation/hardhat-utils/path";
import { createSpinner } from "@nomicfoundation/hardhat-utils/spinner";
import { pluralize } from "@nomicfoundation/hardhat-utils/string";
import pMap from "p-map";

import { FileBuildResultType } from "../../../../types/solidity/build-system.js";
import { getHardhatVersion } from "../../../utils/package.js";
import { DEFAULT_BUILD_PROFILE } from "../build-profiles.js";
import { getSolcCompilerForConfig } from "../solidity-hooks.js";

import {
  getArtifactsDeclarationFile,
  getBuildInfo,
  getBuildInfoOutput,
  getContractArtifact,
  getDuplicatedContractNamesDeclarationFile,
} from "./artifacts.js";
import { loadCache, saveCache } from "./cache.js";
import { sortCompilationJobsByDescendingCost } from "./compilation-job-cost.js";
import { CompilationJobImplementation } from "./compilation-job.js";
import { downloadSolcCompilers, getCompiler } from "./compiler/index.js";
import { buildDependencyGraph } from "./dependency-graph-building.js";
import { readSourceFileFactory } from "./read-source-file.js";
import {
  formatRootPath,
  isNpmParsedRootPath,
  npmModuleToNpmRootPath,
  parseRootPath,
} from "./root-paths-utils.js";
import { SolcConfigSelector } from "./solc-config-selection.js";
import { shouldSuppressWarning } from "./warning-suppression.js";

// Debug logger scoped to this module's namespace.
const log = createDebug("hardhat:core:solidity:build-system");

/**
 * Type guard that narrows a SolidityCompilerConfig down to a
 * SolcSolidityCompilerConfig.
 *
 * A config counts as a solc config when its `type` is either omitted (the
 * default) or explicitly set to "solc".
 */
export function isSolcSolidityCompilerConfig(
  config: SolidityCompilerConfig,
): config is SolcSolidityCompilerConfig {
  const compilerType = config.type;
  return compilerType === "solc" || compilerType === undefined;
}

/**
 * Compiler warnings to suppress from build output.
 *
 * Each rule specifies a warning message and the source file it applies to.
 * This allows suppressing known warnings from internal files (e.g.,
 * console.sol) while still showing the same warning type from user code.
 */
export const SUPPRESSED_WARNINGS: Array<{
  message: string;
  sourceFile: string;
}> = [
  {
    message:
      "Natspec memory-safe-assembly special comment for inline assembly is deprecated and scheduled for removal. Use the memory-safe block annotation instead.",
    // Normalized so the file path uses the current platform's separator.
    sourceFile: path.normalize("hardhat/console.sol"),
  },
];

/**
 * The outcome of running one compilation job: the job itself, the raw
 * compiler output, and the compiler that produced it.
 */
interface CompilationResult {
  compilationJob: CompilationJob;
  compilerOutput: CompilerOutput;
  compiler: Compiler;
}

/**
 * Constructor options for the Solidity build system.
 */
export interface SolidityBuildSystemOptions {
  /** The user's resolved solidity config. */
  readonly solidityConfig: SolidityConfig;
  /** Path to the project root. */
  readonly projectRoot: string;
  /** Directories scanned for `.sol` source files. */
  readonly soliditySourcesPaths: string[];
  /** Directory where compilation artifacts are emitted. */
  readonly artifactsPath: string;
  /** Directory used for the compile cache and intermediate build files. */
  readonly cachePath: string;
  /** Directory containing Solidity test files. */
  readonly solidityTestsPath: string;
}

/**
 * Returns the formatted root path for a dependency graph that has exactly one
 * root file.
 *
 * @param dependencyGraph A dependency graph with exactly one root file.
 * @returns The formatted root path.
 * @throws If the graph doesn't have exactly one root file.
 */
function getSingleRootFilePath(dependencyGraph: DependencyGraph): string {
  const roots = dependencyGraph.getRoots();

  assertHardhatInvariant(
    roots.size === 1,
    "dependency graph doesn't have exactly 1 root file",
  );

  // Destructure the single [userSourceName, root] entry from the iterator.
  const [[userSourceName, root]] = roots.entries();

  return formatRootPath(userSourceName, root);
}

export class SolidityBuildSystemImplementation implements SolidityBuildSystem {
  // Hook manager used to let plugins intercept build, compiler selection,
  // and solc invocation.
  readonly #hooks: HookManager;
  // Immutable configuration provided at construction time.
  readonly #options: SolidityBuildSystemOptions;
  // In-memory compile cache; loaded from and saved to disk during builds.
  #compileCache: CompileCache = {};
  // Tracks whether the configured compilers have already been downloaded.
  #configuredCompilersDownloaded = false;

  constructor(hooks: HookManager, options: SolidityBuildSystemOptions) {
    this.#hooks = hooks;
    this.#options = options;
  }

  public async getScope(fsPath: string): Promise<BuildScope> {
    if (
      fsPath.startsWith(this.#options.solidityTestsPath + path.sep) &&
      fsPath.endsWith(".sol")
    ) {
      return "tests";
    }

    if (fsPath.endsWith(".t.sol")) {
      for (const sourcesPath of this.#options.soliditySourcesPaths) {
        if (fsPath.startsWith(sourcesPath + path.sep)) {
          return "tests";
        }
      }
    }

    return "contracts";
  }

  /**
   * Returns the root file paths to build for the given scope.
   *
   * In split mode, the "contracts" scope yields the local contract files plus
   * the configured npm files, and the "tests" scope yields the test files. In
   * unified mode, the "contracts" scope yields every root: contracts, npm
   * files, and tests.
   *
   * Results are deduplicated where the tests.solidity paths may intersect the
   * sources paths.
   */
  public async getRootFilePaths(
    options: { scope?: BuildScope } = {},
  ): Promise<string[]> {
    const scope = options.scope ?? "contracts";

    this.#ensureSplitCompilationModeIfTestsScope(scope);

    const splitMode = this.#options.solidityConfig.splitTestsCompilation;
    const { localContractFiles, sourceTestFiles } =
      await this.#getSoliditySourcesRootFilePaths();

    // Test roots live both in the dedicated tests directory and, as `.t.sol`
    // files, inside the sources directories.
    const collectTestFiles = async (): Promise<string[]> => {
      const testsDirFiles = await getAllFilesMatching(
        this.#options.solidityTestsPath,
        (f) => f.endsWith(".sol"),
      );
      return [...testsDirFiles, ...sourceTestFiles];
    };

    if (scope === "tests") {
      // Deduplicate in case the tests.solidity paths intersect the sources
      return [...new Set(await collectTestFiles())];
    }

    const npmFilesToBuild = this.#options.solidityConfig.npmFilesToBuild.map(
      npmModuleToNpmRootPath,
    );

    if (splitMode) {
      return [...localContractFiles, ...npmFilesToBuild];
    }

    // Unified mode: the contracts scope returns all roots — contracts, npm
    // files, and tests — deduplicated in case of path intersections.
    return [
      ...new Set([
        ...localContractFiles,
        ...npmFilesToBuild,
        ...(await collectTestFiles()),
      ]),
    ];
  }

  /**
   * Collects every `.sol` file under the configured solidity source
   * directories and partitions them by extension: `.t.sol` files are treated
   * as test files, all others as contract files.
   */
  async #getSoliditySourcesRootFilePaths(): Promise<{
    localContractFiles: string[];
    sourceTestFiles: string[];
  }> {
    // Scan each source directory in parallel; Promise.all preserves the
    // directory order, so the final lists are deterministic.
    const perDirectoryFiles = await Promise.all(
      this.#options.soliditySourcesPaths.map(
        async (dir) => await getAllFilesMatching(dir, (f) => f.endsWith(".sol")),
      ),
    );

    const allFiles = perDirectoryFiles.flat();

    return {
      localContractFiles: allFiles.filter((f) => !f.endsWith(".t.sol")),
      sourceTestFiles: allFiles.filter((f) => f.endsWith(".t.sol")),
    };
  }

  /**
   * Type guard: a build result is successful when it is the per-file results
   * map rather than a CompilationJobCreationError.
   */
  public isSuccessfulBuildResult(
    buildResult: CompilationJobCreationError | Map<string, FileBuildResult>,
  ): buildResult is Map<string, FileBuildResult> {
    const isResultsMap = buildResult instanceof Map;
    return isResultsMap;
  }

  /**
   * Public entry point for building Solidity root files.
   *
   * Validates the requested scope against the split-compilation setting, then
   * delegates to `#build` through the `solidity#build` hook chain so plugins
   * can observe or alter the build.
   */
  public async build(
    rootFilePaths: string[],
    options?: BuildOptions,
  ): Promise<CompilationJobCreationError | Map<string, FileBuildResult>> {
    this.#ensureSplitCompilationModeIfTestsScope(options?.scope);

    const result = await this.#hooks.runHandlerChain(
      "solidity",
      "build",
      [rootFilePaths, options],
      async (_context, nextRootFilePaths, nextOptions) => {
        return this.#build(nextRootFilePaths, nextOptions);
      },
    );

    return result;
  }

  /**
   * Core build implementation behind `build`, invoked through the
   * `solidity#build` hook chain.
   *
   * Resolves the build options, computes the compilation jobs (with cache
   * awareness), runs the runnable jobs in parallel, emits artifacts when the
   * whole build succeeded, persists the compile cache, and returns a per-root
   * map of build results (success, failure, or cache hit).
   *
   * @param rootFilePaths The root files to build.
   * @param options Optional build options; missing fields get defaults.
   * @returns A CompilationJobCreationError, or a map from formatted root path
   *   to its FileBuildResult.
   */
  async #build(
    rootFilePaths: string[],
    options?: BuildOptions,
  ): Promise<CompilationJobCreationError | Map<string, FileBuildResult>> {
    const resolvedOptions: Required<BuildOptions> = {
      buildProfile: DEFAULT_BUILD_PROFILE,
      // Leave one CPU free for the main process.
      concurrency: Math.max(os.cpus().length - 1, 1),
      force: false,
      isolated: false,
      quiet: false,
      scope: "contracts",
      ...options,
    };

    await this.#downloadConfiguredCompilers(resolvedOptions.quiet);

    const { buildProfile } = this.#getBuildProfile(
      resolvedOptions.buildProfile,
    );

    const compilationJobsResult = await this.getCompilationJobs(
      rootFilePaths,
      resolvedOptions,
    );

    if (!compilationJobsResult.success) {
      return compilationJobsResult;
    }

    const spinner = createSpinner({
      text: `Compiling your Solidity ${resolvedOptions.scope}...`,
      // FIX: respect the quiet option. The spinner is terminal output, and
      // all other output in this method is suppressed when quiet is set
      // (see the result printing below), so it must be disabled too.
      enabled: !resolvedOptions.quiet,
    });
    spinner.start();

    try {
      const { compilationJobsPerFile, indexedIndividualJobs, cacheHits } =
        compilationJobsResult;

      // Multiple root files may share the same (merged) job; deduplicate.
      const runnableCompilationJobs = [
        ...new Set(compilationJobsPerFile.values()),
      ];

      // NOTE: We precompute the build ids in parallel here, which are cached
      // internally in each compilation job
      await Promise.all(
        runnableCompilationJobs.map(
          async (runnableCompilationJob) =>
            await runnableCompilationJob.getBuildId(),
        ),
      );

      // We sort the compilation jobs in descending order of estimated
      // compilation cost. This way we can use this algorithm:
      // https://en.wikipedia.org/wiki/Longest-processing-time-first_scheduling
      //
      // Note that it works because pMap schedules the jobs in the order they
      // are in the array.
      const sortedCompilationJobs = sortCompilationJobsByDescendingCost(
        runnableCompilationJobs,
      );

      const results: CompilationResult[] = await pMap(
        sortedCompilationJobs,
        async (runnableCompilationJob) => {
          const { output, compiler } = await this.runCompilationJob(
            runnableCompilationJob,
            resolvedOptions,
          );

          return {
            compilationJob: runnableCompilationJob,
            compilerOutput: output,
            compiler,
          };
        },
        {
          concurrency: resolvedOptions.concurrency,
          // An error when running the compiler is not a compilation failure, but
          // a fatal failure trying to run it, so we just throw on the first error
          stopOnError: true,
        },
      );

      const successfulResults = results.filter(
        (result) => !this.#hasCompilationErrors(result.compilerOutput),
      );

      const isSuccessfulBuild = results.length === successfulResults.length;

      const contractArtifactsGeneratedByCompilationJob: Map<
        CompilationJob,
        ReadonlyMap<string, string[]>
      > = new Map();

      // Artifacts are only emitted (and the cache only updated) when every
      // job in the build succeeded.
      if (isSuccessfulBuild) {
        log("Emitting artifacts of successful build");
        await Promise.all(
          results.map(async (compilationResult) => {
            const emitArtifactsResult = await this.emitArtifacts(
              compilationResult.compilationJob,
              compilationResult.compilerOutput,
              resolvedOptions,
            );

            const { artifactsPerFile } = emitArtifactsResult;

            contractArtifactsGeneratedByCompilationJob.set(
              compilationResult.compilationJob,
              artifactsPerFile,
            );

            // Cache the results
            await this.#cacheCompilationResult(
              indexedIndividualJobs,
              compilationResult,
              emitArtifactsResult,
              buildProfile.isolated,
              resolvedOptions.scope,
            );
          }),
        );

        await saveCache(this.#options.cachePath, this.#compileCache);
      }

      // Stop the spinner before printing errors/warnings below.
      spinner.stop();

      const resultsMap: Map<string, FileBuildResult> = new Map();

      for (const result of results) {
        const contractArtifactsGenerated = isSuccessfulBuild
          ? contractArtifactsGeneratedByCompilationJob.get(
              result.compilationJob,
            )
          : new Map();

        assertHardhatInvariant(
          contractArtifactsGenerated !== undefined,
          "We emitted contract artifacts for all the jobs if the build was successful",
        );

        const errors = await Promise.all(
          (result.compilerOutput.errors ?? []).map((error) =>
            this.remapCompilerError(result.compilationJob, error, true),
          ),
        );

        this.#printSolcErrorsAndWarnings(errors);
        const successfulResult = !this.#hasCompilationErrors(
          result.compilerOutput,
        );

        for (const [
          userSourceName,
          root,
        ] of result.compilationJob.dependencyGraph.getRoots().entries()) {
          if (!successfulResult) {
            resultsMap.set(formatRootPath(userSourceName, root), {
              type: FileBuildResultType.BUILD_FAILURE,
              compilationJob: result.compilationJob,
              errors,
            });

            continue;
          }

          resultsMap.set(formatRootPath(userSourceName, root), {
            type: FileBuildResultType.BUILD_SUCCESS,
            compilationJob: result.compilationJob,
            contractArtifactsGenerated:
              contractArtifactsGenerated.get(userSourceName) ?? [],
            warnings: errors,
          });
        }
      }

      // Add cache hits to the results map
      for (const [rootFilePath, cacheHitInfo] of cacheHits.entries()) {
        resultsMap.set(rootFilePath, {
          type: FileBuildResultType.CACHE_HIT,
          buildId: cacheHitInfo.buildId,
          contractArtifactsGenerated: cacheHitInfo.artifactPaths,
        });
      }

      if (!resolvedOptions.quiet) {
        if (isSuccessfulBuild) {
          await this.#printCompilationResult(runnableCompilationJobs, {
            scope: resolvedOptions.scope,
          });
        }
      }

      return resultsMap;
    } finally {
      // Idempotent: also stops the spinner on early throw paths.
      spinner.stop();
    }
  }

  /**
   * Computes the compilation jobs for the given root files.
   *
   * Builds the dependency graph, selects a solc config per root file's
   * subgraph, creates one individual compilation job per root (used to index
   * the compile cache), checks the cache to decide which roots actually need
   * compiling, and finally produces the runnable jobs — merged by compiler
   * config unless the build profile is isolated.
   *
   * @param rootFilePaths The root file paths to compute jobs for.
   * @param options Optional settings (build profile, force, quiet, scope).
   * @returns A CompilationJobCreationError if a solc config can't be
   *   selected for some root, or the jobs plus cache-hit information.
   */
  public async getCompilationJobs(
    rootFilePaths: string[],
    options?: GetCompilationJobsOptions,
  ): Promise<CompilationJobCreationError | GetCompilationJobsResult> {
    this.#ensureSplitCompilationModeIfTestsScope(options?.scope);

    await this.#downloadConfiguredCompilers(options?.quiet);

    const dependencyGraph = await buildDependencyGraph(
      rootFilePaths.toSorted(), // We sort them to have a deterministic order
      this.#options.projectRoot,
      readSourceFileFactory(this.#hooks),
      this.#hooks,
    );

    const { buildProfileName, buildProfile } = this.#getBuildProfile(
      options?.buildProfile,
    );

    log(`Using build profile ${buildProfileName}`);

    // Optionally record tool versions in the build info, per the profile.
    let toolVersions: ToolVersions | undefined;
    if (buildProfile.toolVersionsInBuildInfo === true) {
      toolVersions = { hardhat: await getHardhatVersion() };
    }

    const solcConfigSelector = new SolcConfigSelector(
      buildProfileName,
      buildProfile,
    );

    // Select a compiler config for each root file's subgraph; bail out with
    // the error if any root can't be matched to a config.
    let subgraphsWithConfig: Array<
      [SolidityCompilerConfig, DependencyGraphImplementation]
    > = [];
    for (const [rootFile, resolvedFile] of dependencyGraph.getRoots()) {
      log(
        `Building compilation job for root file ${rootFile} with input source name ${resolvedFile.inputSourceName} and user source name ${rootFile}`,
      );

      const subgraph = dependencyGraph.getSubgraph(rootFile);

      const configOrError =
        solcConfigSelector.selectBestSolcConfigForSingleRootGraph(subgraph);

      if (!configOrError.success) {
        return configOrError;
      }

      subgraphsWithConfig.push([configOrError.config, subgraph]);
    }

    // get longVersion and isWasm from the compiler for each version
    // These maps are keyed by compiler type first, then version, to avoid
    // collisions between different compiler types using the same version string.
    const solidityVersionToLongVersionPerCompilerType = new Map<
      string,
      Map<string, string>
    >();
    const versionIsWasmPerCompilerType = new Map<
      string,
      Map<string, boolean>
    >();
    for (const [compilerConfig] of subgraphsWithConfig) {
      const compilerType = compilerConfig.type ?? "solc";
      let longVersionMap =
        solidityVersionToLongVersionPerCompilerType.get(compilerType);
      if (longVersionMap === undefined) {
        longVersionMap = new Map();
        solidityVersionToLongVersionPerCompilerType.set(
          compilerType,
          longVersionMap,
        );
      }

      let isWasmMap = versionIsWasmPerCompilerType.get(compilerType);
      if (isWasmMap === undefined) {
        isWasmMap = new Map();
        versionIsWasmPerCompilerType.set(compilerType, isWasmMap);
      }

      let longVersion = longVersionMap.get(compilerConfig.version);

      // Only resolve each (type, version) pair once.
      if (longVersion === undefined) {
        const compiler = await this.#hooks.runHandlerChain(
          "solidity",
          "getCompiler",
          [compilerConfig],
          async (_context, cfg) =>
            await getSolcCompilerForConfig(cfg, buildProfile.preferWasm),
        );
        longVersion = compiler.longVersion;
        longVersionMap.set(compilerConfig.version, longVersion);
        isWasmMap.set(compilerConfig.version, compiler.isSolcJs);
      }
    }

    // build job for each root file. At this point subgraphsWithConfig are 1 root file each
    const indexedIndividualJobs: Map<string, CompilationJob> = new Map();
    const sharedContentHashes = new Map<string, string>();
    await Promise.all(
      subgraphsWithConfig.map(async ([config, subgraph]) => {
        const compilerType = config.type ?? "solc";
        const longVersionMap =
          solidityVersionToLongVersionPerCompilerType.get(compilerType);

        assertHardhatInvariant(
          longVersionMap !== undefined,
          `No long version map for compiler type ${compilerType}`,
        );

        const longVersion = longVersionMap.get(config.version);

        assertHardhatInvariant(
          longVersion !== undefined,
          "longVersion should not be undefined",
        );

        const individualJob = new CompilationJobImplementation(
          subgraph,
          config,
          longVersion,
          this.#hooks,
          sharedContentHashes,
          toolVersions,
        );

        await individualJob.getBuildId(); // precompute

        indexedIndividualJobs.set(
          getSingleRootFilePath(subgraph),
          individualJob,
        );
      }),
    );

    // Load the cache
    this.#compileCache = await loadCache(this.#options.cachePath);

    // Select which files to compile
    const rootFilesToCompile: Set<string> = new Set();
    const cacheHits: Map<string, CacheHitInfo> = new Map();

    const isolated = buildProfile.isolated;

    for (const [rootFile, compilationJob] of indexedIndividualJobs.entries()) {
      const jobHash = await compilationJob.getBuildId();
      const cacheResult = this.#compileCache[rootFile];
      const compilerType = compilationJob.solcConfig.type ?? "solc";
      const isWasmMap = versionIsWasmPerCompilerType.get(compilerType);

      assertHardhatInvariant(
        isWasmMap !== undefined,
        `No isWasm map for compiler type ${compilerType}`,
      );

      const isWasm = isWasmMap.get(compilationJob.solcConfig.version);

      assertHardhatInvariant(
        isWasm !== undefined,
        `Version ${compilationJob.solcConfig.version} not present in isWasm map`,
      );

      // If there's no cache for the root file, or the compilation job changed, or using force flag, or isolated mode changed, compile it
      if (
        options?.force === true ||
        cacheResult === undefined ||
        cacheResult.jobHash !== jobHash ||
        cacheResult.isolated !== isolated ||
        cacheResult.compilerType !== compilerType ||
        cacheResult.wasm !== isWasm
      ) {
        rootFilesToCompile.add(rootFile);
        continue;
      }

      // Validate output layout: if the cached layout doesn't match the
      // expected layout for the current config, treat it as a miss.
      // Pre-existing cache entries without these fields are also treated
      // as misses.
      const expectedLayout = await this.#getExpectedOutputLayout(
        rootFile,
        options?.scope ?? "contracts",
      );

      if (
        cacheResult.artifactsDirectory === undefined ||
        cacheResult.emitsTypeDeclarations === undefined ||
        cacheResult.artifactsDirectory !== expectedLayout.artifactsDirectory ||
        cacheResult.emitsTypeDeclarations !==
          expectedLayout.emitsTypeDeclarations
      ) {
        rootFilesToCompile.add(rootFile);
        continue;
      }

      // If any of the emitted files are not present anymore, compile it
      const {
        artifactPaths,
        buildInfoPath,
        buildInfoOutputPath,
        typeFilePath,
      } = cacheResult;

      for (const outputFilePath of [
        ...artifactPaths,
        buildInfoPath,
        buildInfoOutputPath,
        typeFilePath,
      ]) {
        // Type declaration file can be undefined (e.g. for solidity tests)
        if (outputFilePath === undefined) {
          continue;
        }

        if (!(await exists(outputFilePath))) {
          rootFilesToCompile.add(rootFile);
          break;
        }
      }

      // If file was not added to rootFilesToCompile, it's a cache hit
      if (!rootFilesToCompile.has(rootFile)) {
        // Extract buildId from buildInfoPath (format: <dir>/<buildId>.json)
        const buildId = path.basename(cacheResult.buildInfoPath, ".json");
        cacheHits.set(getSingleRootFilePath(compilationJob.dependencyGraph), {
          buildId,
          artifactPaths,
        });
      }
    }

    if (!isolated) {
      // non-isolated mode
      log(`Merging compilation jobs`);

      const mergedSubgraphsByConfig: Map<
        SolidityCompilerConfig,
        DependencyGraphImplementation
      > = new Map();

      // Note: This groups the subgraphs by compiler config. It compares the
      // configs based on reference, and not by deep equality. This is
      // inherently type-aware: two configs with different types will always be
      // different references. It misses some merging opportunities, but this is
      // Hardhat v2's behavior and works well enough.
      for (const [config, subgraph] of subgraphsWithConfig) {
        const rootFile = getSingleRootFilePath(subgraph);

        // Skip root files with cache hit (should not recompile)
        if (!rootFilesToCompile.has(rootFile)) {
          continue;
        }

        const mergedSubgraph = mergedSubgraphsByConfig.get(config);

        if (mergedSubgraph === undefined) {
          mergedSubgraphsByConfig.set(config, subgraph);
        } else {
          mergedSubgraphsByConfig.set(config, mergedSubgraph.merge(subgraph));
        }
      }

      subgraphsWithConfig = [...mergedSubgraphsByConfig.entries()];
    } else {
      // isolated mode
      subgraphsWithConfig = subgraphsWithConfig.filter(
        ([_config, subgraph]) => {
          const rootFile = getSingleRootFilePath(subgraph);

          return rootFilesToCompile.has(rootFile);
        },
      );
    }

    // Create the final runnable jobs, one per (possibly merged) subgraph,
    // and index them by every root file they cover.
    const compilationJobsPerFile = new Map<string, CompilationJob>();
    for (const [compilerConfig, subgraph] of subgraphsWithConfig) {
      const compilerType = compilerConfig.type ?? "solc";
      const longVersionMap =
        solidityVersionToLongVersionPerCompilerType.get(compilerType);

      assertHardhatInvariant(
        longVersionMap !== undefined,
        `No long version map for compiler type ${compilerType}`,
      );

      const longVersion = longVersionMap.get(compilerConfig.version);

      assertHardhatInvariant(
        longVersion !== undefined,
        "longVersion should not be undefined",
      );

      const runnableCompilationJob = new CompilationJobImplementation(
        subgraph,
        compilerConfig,
        longVersion,
        this.#hooks,
        sharedContentHashes,
        toolVersions,
      );

      for (const [userSourceName, root] of subgraph.getRoots().entries()) {
        compilationJobsPerFile.set(
          formatRootPath(userSourceName, root),
          runnableCompilationJob,
        );
      }
    }

    return {
      success: true,
      compilationJobsPerFile,
      indexedIndividualJobs,
      cacheHits,
    };
  }

  /**
   * Looks up a build profile by name in the solidity config, defaulting to
   * the default build profile.
   *
   * @throws HardhatError with BUILD_PROFILE_NOT_FOUND if no profile with
   *   that name is configured.
   */
  #getBuildProfile(buildProfileName: string = DEFAULT_BUILD_PROFILE) {
    const { profiles } = this.#options.solidityConfig;
    const buildProfile = profiles[buildProfileName];

    if (buildProfile !== undefined) {
      return { buildProfileName, buildProfile };
    }

    throw new HardhatError(
      HardhatError.ERRORS.CORE.SOLIDITY.BUILD_PROFILE_NOT_FOUND,
      {
        buildProfileName,
      },
    );
  }

  public async runCompilationJob(
    runnableCompilationJob: CompilationJob,
    options?: RunCompilationJobOptions,
  ): Promise<RunCompilationJobResult> {
    await this.#downloadConfiguredCompilers(options?.quiet);

    let numberOfFiles = 0;
    for (const _ of runnableCompilationJob.dependencyGraph.getAllFiles()) {
      numberOfFiles++;
    }

    const numberOfRootFiles =
      runnableCompilationJob.dependencyGraph.getRoots().size;

    const { buildProfile } = this.#getBuildProfile(options?.buildProfile);

    const compiler = await this.#hooks.runHandlerChain(
      "solidity",
      "getCompiler",
      [runnableCompilationJob.solcConfig],
      async (_context, cfg) =>
        await getSolcCompilerForConfig(cfg, buildProfile.preferWasm),
    );

    log(
      `Compiling ${numberOfRootFiles} root files and ${numberOfFiles - numberOfRootFiles} dependency files with ${runnableCompilationJob.solcConfig.type ?? "solc"} ${runnableCompilationJob.solcConfig.version} using ${compiler.compilerPath}`,
    );

    assertHardhatInvariant(
      runnableCompilationJob.solcLongVersion === compiler.longVersion,
      "The long version of the compiler should match the long version of the compilation job",
    );

    const input = await runnableCompilationJob.getSolcInput();

    const output = await this.#hooks.runHandlerChain(
      "solidity",
      "invokeSolc",
      [compiler, input, runnableCompilationJob.solcConfig],
      async (_context, nextCompiler, nextSolcInput) => {
        return await nextCompiler.compile(nextSolcInput);
      },
    );

    return { output, compiler };
  }

  /**
   * Rewrites the source locations in a compiler error's formatted message so
   * that they point to file system paths instead of solc input source names.
   *
   * @param runnableCompilationJob The job whose dependency graph maps input
   *   source names back to resolved files.
   * @param error The raw compiler error.
   * @param shouldShortenPaths Whether to shorten the resulting fs paths.
   * @returns A copy of the error with its formattedMessage remapped.
   */
  public async remapCompilerError(
    runnableCompilationJob: CompilationJob,
    error: CompilerOutputError,
    shouldShortenPaths: boolean = false,
  ): Promise<CompilerOutputError> {
    // Matches the "--> <source name>" location lines in formatted messages.
    const locationPattern = /(-->\s+)([^\s:\n]+)/g;

    const remapLocation = (
      _match: string,
      prefix: string,
      inputSourceName: string,
    ): string => {
      const file =
        runnableCompilationJob.dependencyGraph.getFileByInputSourceName(
          inputSourceName,
        );

      // Unknown source names are left untouched.
      if (file === undefined) {
        return `${prefix}${inputSourceName}`;
      }

      const fsPath = shouldShortenPaths
        ? shortenPath(file.fsPath)
        : file.fsPath;

      return `${prefix}${fsPath}`;
    };

    const { type, component, message, severity, errorCode } = error;

    return {
      type,
      component,
      message,
      severity,
      errorCode,
      formattedMessage: error.formattedMessage?.replace(
        locationPattern,
        remapLocation,
      ),
    };
  }

  public async emitArtifacts(
    runnableCompilationJob: CompilationJob,
    compilerOutput: CompilerOutput,
    options: { scope?: BuildScope } = {},
  ): Promise<EmitArtifactsResult> {
    const scope = options.scope ?? "contracts";

    this.#ensureSplitCompilationModeIfTestsScope(scope);

    const unified = !this.#options.solidityConfig.splitTestsCompilation;

    const artifactsPerFile = new Map<string, string[]>();
    const typeFilePaths = new Map<string, string>();
    const buildId = await runnableCompilationJob.getBuildId();

    const artifactsDirectory = await this.getArtifactsDirectory(scope);

    // We emit the artifacts for each root file, first emitting one artifact
    // for each contract, and then one declaration file for the entire file,
    // which defines their types and augments the ArtifactMap type.
    for (const [userSourceName, root] of runnableCompilationJob.dependencyGraph
      .getRoots()
      .entries()) {
      const fileFolder = path.join(artifactsDirectory, userSourceName);

      // If the folder exists, we remove it first, as we don't want to leave
      // any old artifacts there.
      await remove(fileFolder);

      const contracts = compilerOutput.contracts?.[root.inputSourceName];
      const paths: string[] = [];
      const artifacts: Artifact[] = [];

      // This can be undefined if no contract is present in the source file
      if (contracts !== undefined) {
        for (const [contractName, contract] of Object.entries(contracts)) {
          const contractArtifactPath = path.join(
            fileFolder,
            `${contractName}.json`,
          );

          const artifact = getContractArtifact(
            buildId,
            userSourceName,
            root.inputSourceName,
            contractName,
            contract,
          );

          await writeUtf8File(
            contractArtifactPath,
            JSON.stringify(artifact, undefined, 2),
          );

          paths.push(contractArtifactPath);
          artifacts.push(artifact);
        }
      }

      artifactsPerFile.set(formatRootPath(userSourceName, root), paths);

      // In split mode, test roots are never part of a "contracts"-scoped pass,
      // so the scope guard below is sufficient. In unified mode, both contract
      // and test roots share the same pass, so we check individually.
      const isTestRoot = unified
        ? (await this.getScope(root.fsPath)) === "tests"
        : false;

      // Write the type declaration file for contract roots only.
      if (scope === "contracts" && !isTestRoot) {
        const artifactsDeclarationFilePath = path.join(
          fileFolder,
          "artifacts.d.ts",
        );
        typeFilePaths.set(
          formatRootPath(userSourceName, root),
          artifactsDeclarationFilePath,
        );

        const artifactsDeclarationFile = getArtifactsDeclarationFile(artifacts);

        await writeUtf8File(
          artifactsDeclarationFilePath,
          artifactsDeclarationFile,
        );
      }
    }

    // Once we have emitted all the contract artifacts and its declaration
    // file, we emit the build info file and its output file.
    const buildInfoId = buildId;

    const buildInfoCacheDirPath = path.join(
      this.#options.cachePath,
      `build-info`,
    );

    await ensureDir(buildInfoCacheDirPath);

    const buildInfoCachePath = path.join(
      buildInfoCacheDirPath,
      `${buildInfoId}.json`,
    );

    const buildInfoOutputCachePath = path.join(
      buildInfoCacheDirPath,
      `${buildInfoId}.output.json`,
    );

    // BuildInfo and BuildInfoOutput files are large, so we write them
    // concurrently, and keep their lifetimes separated and small.
    // NOTE: First, we write the build info file and its output to the cache
    // directory. Once both are successfully written, we move them to the
    // artifacts directory sequentially, ensuring the build info file is moved
    // last. This approach minimizes the risk of having corrupted build info
    // files in the artifacts directory and ensures other processes, like
    // `hardhat node`, can safely monitor the build info file as an indicator
    // for build completion.
    await Promise.all([
      (async () => {
        const buildInfo = await getBuildInfo(runnableCompilationJob);

        // TODO: Maybe formatting the build info is slow, but it's mostly
        // strings, so it probably shouldn't be a problem.
        await writeJsonFile(buildInfoCachePath, buildInfo);
      })(),
      (async () => {
        const buildInfoOutput = await getBuildInfoOutput(
          runnableCompilationJob,
          compilerOutput,
        );

        // NOTE: We use writeJsonFileAsStream here because the build info output might exceed
        // the maximum string length.
        // TODO: Earlier in the build process, very similar files are created on disk by the
        // Compiler.  Instead of creating them again, we should consider copying/moving them.
        // This would require changing the format of the build info output file.
        await writeJsonFileAsStream(buildInfoOutputCachePath, buildInfoOutput);
      })(),
    ]);

    const buildInfoDirPath = path.join(artifactsDirectory, `build-info`);

    await ensureDir(buildInfoDirPath);

    const buildInfoPath = path.join(buildInfoDirPath, `${buildInfoId}.json`);

    const buildInfoOutputPath = path.join(
      buildInfoDirPath,
      `${buildInfoId}.output.json`,
    );

    await move(buildInfoOutputCachePath, buildInfoOutputPath);
    await move(buildInfoCachePath, buildInfoPath);

    return {
      artifactsPerFile,
      buildInfoPath,
      buildInfoOutputPath,
      typeFilePaths,
    };
  }

  public async getArtifactsDirectory(scope: BuildScope): Promise<string> {
    // In unified mode, both scopes point to the main artifacts directory
    // because contract and test artifacts live together.
    if (!this.#options.solidityConfig.splitTestsCompilation) {
      return this.#options.artifactsPath;
    }

    return scope === "contracts"
      ? this.#options.artifactsPath
      : path.join(this.#options.cachePath, "test-artifacts");
  }

  /**
   * Removes build outputs that no longer correspond to any of the given root
   * files, for the artifacts directory of the given scope.
   *
   * The cleanup proceeds in order: stale `*.sol` artifact directories are
   * deleted, then build-info files that no remaining artifact references,
   * and finally (contracts scope only) the top-level `artifacts.d.ts` is
   * regenerated and the `onCleanUpArtifacts` hook is run.
   *
   * @param rootFilePaths The root file paths (fs paths or npm-style root
   * paths) whose artifacts should be KEPT; everything else is removed.
   * @param options.scope The build scope; defaults to "contracts". Using
   * "tests" without split compilation enabled throws.
   */
  public async cleanupArtifacts(
    rootFilePaths: string[],
    options: { scope?: BuildScope } = {},
  ): Promise<void> {
    const scope = options.scope ?? "contracts";

    this.#ensureSplitCompilationModeIfTestsScope(scope);

    log(`Cleaning up artifacts`);
    const artifactsDirectory = await this.getArtifactsDirectory(scope);

    // Map every root to its user source name: npm roots keep their npm path,
    // local roots become project-relative forward-slash paths, matching the
    // directory layout used when artifacts were emitted.
    const userSourceNames = rootFilePaths.map((rootFilePath) => {
      const parsed = parseRootPath(rootFilePath);
      return isNpmParsedRootPath(parsed)
        ? parsed.npmPath
        : toForwardSlash(
            path.relative(this.#options.projectRoot, parsed.fsPath),
          );
    });

    const userSourceNamesSet = new Set(userSourceNames);

    // Delete any `<source>.sol` artifact directory whose source is no longer
    // one of the roots we were asked to keep.
    for (const file of await getAllDirectoriesMatching(
      artifactsDirectory,
      (d) => d.endsWith(".sol"),
    )) {
      const relativePath = toForwardSlash(
        path.relative(artifactsDirectory, file),
      );

      if (!userSourceNamesSet.has(relativePath)) {
        await remove(file);
      }
    }

    const buildInfosDir = path.join(artifactsDirectory, `build-info`);

    // TODO: This logic is duplicated with respect to the artifacts manager
    // Collect the surviving artifact json files; top-level json files and the
    // build-info directory itself are excluded.
    const artifactPaths = await getAllFilesMatching(
      artifactsDirectory,
      (p) =>
        p.endsWith(".json") && // Only consider json files
        // Ignore top level json files
        p.indexOf(path.sep, artifactsDirectory.length + path.sep.length) !== -1,
      (dir) => dir !== buildInfosDir,
    );

    // A build-info file is reachable iff some surviving artifact points at it.
    const reachableBuildInfoIds = await Promise.all(
      artifactPaths.map(async (artifactPath) => {
        const artifact: Artifact = await readJsonFile(artifactPath);
        return artifact.buildInfoId;
      }),
    );

    const reachableBuildInfoIdsSet = new Set(
      reachableBuildInfoIds.filter((id) => id !== undefined),
    );

    // The build-info directory is expected to be flat: every build-info file
    // lives directly under it, so a non-recursive `readdir` is enough.
    const buildInfoFiles = await readdirOrEmpty(buildInfosDir);

    for (const buildInfoFile of buildInfoFiles) {
      let id: string | undefined;

      // Both `<id>.json` and `<id>.output.json` share the same id; check the
      // longer suffix first so `.output.json` isn't misparsed as `.json`.
      if (buildInfoFile.endsWith(".output.json")) {
        id = buildInfoFile.slice(0, -".output.json".length);
      } else if (buildInfoFile.endsWith(".json")) {
        id = buildInfoFile.slice(0, -".json".length);
      } else {
        continue;
      }

      if (!reachableBuildInfoIdsSet.has(id)) {
        await remove(path.join(buildInfosDir, buildInfoFile));
      }
    }

    // These steps only apply when compiling contracts
    if (scope === "contracts") {
      // Get duplicated contract names and write a top-level artifacts.d.ts file
      const artifactNameCounts = new Map<string, number>();
      for (const artifactPath of artifactPaths) {
        const basename = path.basename(artifactPath);
        // Contract name is everything before the first dot of the file name.
        const name = basename.substring(0, basename.indexOf("."));

        const count = artifactNameCounts.get(name) ?? 0;

        artifactNameCounts.set(name, count + 1);
      }

      const duplicatedNames = [...artifactNameCounts.entries()]
        .filter(([_, count]) => count > 1)
        .map(([name, _]) => name);

      const duplicatedContractNamesDeclarationFilePath = path.join(
        artifactsDirectory,
        "artifacts.d.ts",
      );

      await writeUtf8File(
        duplicatedContractNamesDeclarationFilePath,
        getDuplicatedContractNamesDeclarationFile(duplicatedNames),
      );

      // Run the onCleanUpArtifacts hook
      await this.#hooks.runHandlerChain(
        "solidity",
        "onCleanUpArtifacts",
        [artifactPaths],
        async () => {},
      );
    }
  }

  /**
   * Recompiles the input captured in an existing build info file.
   *
   * @param buildInfo The build info whose `input` should be recompiled with
   * its recorded `solcVersion`.
   * @param options.quiet Suppresses download progress output when true.
   * @returns The resulting compiler output.
   */
  public async compileBuildInfo(
    buildInfo: SolidityBuildInfo,
    options?: CompileBuildInfoOptions,
  ): Promise<CompilerOutput> {
    const quiet = options?.quiet === true;

    // Build info recompilation is always solc-only: build info files are
    // produced by solc and must be recompiled with the same solc version.
    // We bypass both downloadCompilers and getCompiler hooks — this is a
    // self-contained solc replay path, not plugin-configurable compilation.
    await downloadSolcCompilers(new Set([buildInfo.solcVersion]), quiet);

    const solc = await getCompiler(buildInfo.solcVersion, {
      preferWasm: false,
    });

    return await solc.compile(buildInfo.input);
  }

  /**
   * Downloads every compiler referenced by the Solidity config, at most once
   * per build-system instance.
   *
   * @param quiet Accepted for signature compatibility but deliberately
   * ignored: downloads are always announced.
   */
  async #downloadConfiguredCompilers(quiet = false): Promise<void> {
    // We always print that we are downloading the compilers
    quiet = false;

    // Already done once for this instance; nothing to do.
    if (this.#configuredCompilersDownloaded) {
      return;
    }

    await this.#hooks.runParallelHandlers("solidity", "downloadCompilers", [
      this.#getAllSolidityCompilerConfigs(),
      quiet,
    ]);

    this.#configuredCompilersDownloaded = true;
  }

  /**
   * Flattens the compiler configs of every build profile — default compilers
   * plus per-file overrides — into a single list.
   */
  #getAllSolidityCompilerConfigs(): SolidityCompilerConfig[] {
    const configs: SolidityCompilerConfig[] = [];

    for (const profile of Object.values(
      this.#options.solidityConfig.profiles,
    )) {
      configs.push(...profile.compilers, ...Object.values(profile.overrides));
    }

    return configs;
  }

  /**
   * Detects solc TypeErrors caused by unsupported console.log usage, by
   * matching the characteristic pieces of the error message.
   */
  #isConsoleLogError(error: CompilerOutputError): boolean {
    if (error.type !== "TypeError") {
      return false;
    }

    const { message } = error;

    if (typeof message !== "string") {
      return false;
    }

    return (
      message.includes("log") && message.includes("type(library console)")
    );
  }

  /**
   * Returns true for errors that should fail the build: error-severity
   * entries that aren't plain warnings.
   */
  #isFatalError(error: CompilerOutputError): boolean {
    if (error.type === "Warning") {
      return false;
    }

    return error.severity === "error";
  }

  /**
   * Returns true when the compiler output contains at least one fatal error.
   * A missing errors array counts as a clean compilation.
   */
  #hasCompilationErrors(output: CompilerOutput): boolean {
    const errors = output.errors ?? [];
    return errors.some((error) => this.#isFatalError(error));
  }

  /**
   * Returns a properly formatted Internal Compiler Error message.
   *
   * This works around a bug in Solidity where the formatted message is a bare
   * "InternalCompilerError:". See: https://github.com/ethereum/solidity/issues/9926
   *
   * @returns The reconstructed message, or undefined if the error is not an
   * ICE or is already properly formatted.
   */
  #getFormattedInternalCompilerErrorMessage(
    error: CompilerOutputError,
  ): string | undefined {
    const isBareIce =
      error.formattedMessage?.trim() === "InternalCompilerError:";

    if (!isBareIce) {
      return undefined;
    }

    // Rebuild the message from type + message, trimming any trailing `:` we
    // found at the end of some messages, then any leftover whitespace.
    const rebuilt = `${error.type}: ${error.message}`;
    return rebuilt.replace(/[:\s]*$/g, "").trim();
  }

  /**
   * Computes where a root's artifacts should live and whether a type
   * declaration file is expected for it.
   *
   * @param rootFilePath The root path (fs path or npm-style root path).
   * @param scope The build scope the root is being compiled under.
   */
  async #getExpectedOutputLayout(
    rootFilePath: string,
    scope: BuildScope,
  ): Promise<{ artifactsDirectory: string; emitsTypeDeclarations: boolean }> {
    const artifactsDirectory = await this.getArtifactsDirectory(scope);

    // Test-scope builds never emit type declarations.
    if (scope !== "contracts") {
      return { artifactsDirectory, emitsTypeDeclarations: false };
    }

    // In split mode the "contracts" scope alone guarantees declarations.
    if (this.#options.solidityConfig.splitTestsCompilation) {
      return { artifactsDirectory, emitsTypeDeclarations: true };
    }

    // Unified mode: contract and test roots share the same pass, so a local
    // (non-npm) root classified as a test must be excluded individually.
    const parsed = parseRootPath(rootFilePath);
    const isTestRoot =
      !isNpmParsedRootPath(parsed) &&
      (await this.getScope(parsed.fsPath)) === "tests";

    return { artifactsDirectory, emitsTypeDeclarations: !isTestRoot };
  }

  async #cacheCompilationResult(
    indexedIndividualJobs: Map<string, CompilationJob>,
    result: CompilationResult,
    emitArtifactsResult: EmitArtifactsResult,
    isolated: boolean,
    scope: BuildScope,
  ): Promise<void> {
    for (const [userSourceName, root] of result.compilationJob.dependencyGraph
      .getRoots()
      .entries()) {
      const rootFilePath = formatRootPath(userSourceName, root);
      const individualJob = indexedIndividualJobs.get(rootFilePath);

      assertHardhatInvariant(
        individualJob !== undefined,
        "Failed to get individual job from compiled job",
      );

      const artifactPaths =
        emitArtifactsResult.artifactsPerFile.get(rootFilePath);

      assertHardhatInvariant(
        artifactPaths !== undefined,
        `No artifacts found on map for ${rootFilePath}`,
      );

      const typeFilePath = emitArtifactsResult.typeFilePaths.get(rootFilePath);

      const jobHash = await individualJob.getBuildId();

      const expectedLayout = await this.#getExpectedOutputLayout(
        rootFilePath,
        scope,
      );

      this.#compileCache[rootFilePath] = {
        jobHash,
        isolated,
        compilerType: individualJob.solcConfig.type ?? "solc",
        artifactPaths,
        buildInfoPath: emitArtifactsResult.buildInfoPath,
        buildInfoOutputPath: emitArtifactsResult.buildInfoOutputPath,
        typeFilePath,
        wasm: result.compiler.isSolcJs,
        artifactsDirectory: expectedLayout.artifactsDirectory,
        emitsTypeDeclarations: expectedLayout.emitsTypeDeclarations,
      };
    }
  }

  /**
   * Prints solc errors and warnings to the console, with severity-based
   * coloring and a pointer to the console.log docs when relevant.
   *
   * Warnings matching the suppression rules are filtered out first; if
   * nothing remains, nothing is printed at all.
   *
   * @param errors The `errors` array of a solc output, if any.
   */
  #printSolcErrorsAndWarnings(errors?: CompilerOutputError[]): void {
    if (errors === undefined) {
      return;
    }

    // Filter out specific warnings that should be suppressed
    const filteredErrors = errors.filter(
      (error) => !this.#shouldSuppressWarning(error),
    );

    // Fix: don't print a stray leading blank line when every entry was
    // suppressed (or the array was empty to begin with).
    if (filteredErrors.length === 0) {
      return;
    }

    console.log();

    for (const error of filteredErrors) {
      if (this.#isFatalError(error)) {
        // Prefer the ICE-workaround message, then solc's formatted message,
        // then the raw message.
        const errorMessage: string =
          this.#getFormattedInternalCompilerErrorMessage(error) ??
          error.formattedMessage ??
          error.message;

        console.error(
          errorMessage
            .replace(/^\w+:/, (t) => styleText(["red", "bold"], t))
            .trimEnd() + "\n",
        );
      } else {
        console.warn(
          (error.formattedMessage ?? error.message)
            .replace(/^\w+:/, (t) => styleText(["yellow", "bold"], t))
            .trimEnd() + "\n",
        );
      }
    }

    const hasConsoleErrors: boolean = filteredErrors.some((e) =>
      this.#isConsoleLogError(e),
    );

    if (hasConsoleErrors) {
      console.error(
        styleText(
          "red",
          `The console.log call you made isn't supported. See https://hardhat.org/console-log for the list of supported methods.`,
        ),
      );
      console.log();
    }
  }

  /**
   * Decides whether a compiler warning should be hidden, delegating to the
   * shared suppression rules with the formatted message when available.
   */
  #shouldSuppressWarning(error: CompilerOutputError): boolean {
    return shouldSuppressWarning(
      error.formattedMessage ?? error.message,
      this.#options.solidityTestsPath,
      this.#options.projectRoot,
    );
  }

  /**
   * Prints a per-compiler summary of the compilation, e.g.
   * "Compiled 3 Solidity files with solc 0.8.33 (evm target: cancun)".
   *
   * Jobs are grouped first by compiler type + Solidity version, then by evm
   * target, and one line is printed per group in sorted order.
   *
   * @param runnableCompilationJobs The jobs that were actually compiled.
   * @param options.scope Controls the "file" vs "test file" wording.
   */
  async #printCompilationResult(
    runnableCompilationJobs: CompilationJob[],
    options: { scope: BuildScope },
  ) {
    // Outer key: `${compilerType}#${solcVersion}`; inner key: evm version.
    const jobsPerVersionAndEvmVersion = new Map<
      string,
      Map<string, CompilationJob[]>
    >();

    if (runnableCompilationJobs.length === 0) {
      if (options.scope === "contracts") {
        console.log("No contracts to compile");
      } else {
        console.log("No Solidity tests to compile");
      }

      return;
    }

    for (const job of runnableCompilationJobs) {
      const compilerType = job.solcConfig.type ?? "solc";
      const solcVersion = job.solcConfig.version;
      const solcInput = await job.getSolcInput();
      // When the input doesn't pin an evm version, solc's own default applies;
      // we print a placeholder pointing at solc's docs instead.
      const evmVersion =
        solcInput.settings.evmVersion ??
        `Check solc ${solcVersion}'s doc for its default evm version`;

      // Group by compiler type + Solidity version to produce separate log
      // lines for e.g. "solc 0.8.33" vs "solx 0.1.3 (Solidity 0.8.33)".
      const groupKey = `${compilerType}#${solcVersion}`;

      let jobsPerVersion = jobsPerVersionAndEvmVersion.get(groupKey);
      if (jobsPerVersion === undefined) {
        jobsPerVersion = new Map();
        jobsPerVersionAndEvmVersion.set(groupKey, jobsPerVersion);
      }

      let jobsPerEvmVersion = jobsPerVersion.get(evmVersion);
      if (jobsPerEvmVersion === undefined) {
        jobsPerEvmVersion = [];
        jobsPerVersion.set(evmVersion, jobsPerEvmVersion);
      }

      jobsPerEvmVersion.push(job);
    }

    // Print groups in deterministic (sorted) order.
    for (const groupKey of [...jobsPerVersionAndEvmVersion.keys()].sort()) {
      /* eslint-disable-next-line @typescript-eslint/no-non-null-assertion --
      This is a valid key, just sorted */
      const jobsPerEvmVersion = jobsPerVersionAndEvmVersion.get(groupKey)!;
      const [compilerType, solidityVersion] = groupKey.split("#");

      for (const evmVersion of [...jobsPerEvmVersion.keys()].sort()) {
        /* eslint-disable-next-line @typescript-eslint/no-non-null-assertion --
        This is a valid key, just sorted */
        const jobs = jobsPerEvmVersion.get(evmVersion)!;

        // Count root files, not jobs: one job may compile several roots.
        const rootFiles = jobs.reduce(
          (count, job) => count + job.dependencyGraph.getRoots().size,
          0,
        );

        // For solc, the compiler version is the Solidity version.
        // For other compilers, extract the compiler's own version from the
        // longVersion stored on the compilation job, and show the Solidity
        // version separately.
        let compilerLabel: string;
        if (compilerType === "solc") {
          compilerLabel = `solc ${solidityVersion}`;
        } else {
          const longVersion = jobs[0].solcLongVersion;
          const compilerVersion = longVersion.split("+")[0];
          compilerLabel = `${compilerType} ${compilerVersion} (Solidity ${solidityVersion})`;
        }

        console.log(
          styleText(
            "bold",
            `Compiled ${rootFiles} Solidity ${pluralize(
              options.scope === "contracts" ? "file" : "test file",
              rootFiles,
            )} with ${compilerLabel}`,
          ),
          `(evm target: ${evmVersion})`,
        );
      }
    }
  }

  /**
   * Throws when the "tests" scope is used while split test compilation is
   * disabled in the Solidity config; no-op otherwise.
   */
  #ensureSplitCompilationModeIfTestsScope(scope: BuildScope = "contracts") {
    const splitEnabled = this.#options.solidityConfig.splitTestsCompilation;

    if (scope !== "tests" || splitEnabled) {
      return;
    }

    throw new HardhatError(
      HardhatError.ERRORS.CORE.SOLIDITY.SPLIT_TESTS_COMPILATION_DISABLED,
    );
  }
}

/**
 * Normalizes a path-like string to use forward slashes, converting any
 * Windows-style backslash separators. Forward slashes are left untouched,
 * so consecutive separators are preserved as-is.
 */
function toForwardSlash(str: string): string {
  return str.replace(/\\/g, path.posix.sep);
}
