diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionContext.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionContext.java index 1314114a770d86..ec2eae86da9a97 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionContext.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/ModuleExtensionContext.java @@ -71,6 +71,7 @@ protected ModuleExtensionContext( timeoutScaling, processWrapper, starlarkSemantics, + ModuleExtensionEvaluationProgress.moduleExtensionEvaluationContextString(extensionId), remoteExecutor, /* allowWatchingPathsOutsideWorkspace= */ false); this.extensionId = extensionId; @@ -83,8 +84,10 @@ public Path getWorkingDirectory() { } @Override - protected String getIdentifyingStringForLogging() { - return ModuleExtensionEvaluationProgress.moduleExtensionEvaluationContextString(extensionId); + protected boolean shouldDeleteWorkingDirectoryOnClose(boolean successful) { + // The contents of the working directory are purely ephemeral, only the repos instantiated by + // the extension are considered its results. + return true; } @Override diff --git a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/SingleExtensionEvalFunction.java b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/SingleExtensionEvalFunction.java index 3d521fb2e8ac4e..b1ca65d53d3b51 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/SingleExtensionEvalFunction.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/bzlmod/SingleExtensionEvalFunction.java @@ -891,15 +891,15 @@ public RunModuleExtensionResult run( BazelModuleContext.of(bzlLoadValue.getModule()).repoMapping(), directories, env.getListener()); - ModuleExtensionContext moduleContext; Optional moduleExtensionMetadata; var repoMappingRecorder = new Label.RepoMappingRecorder(); repoMappingRecorder.mergeEntries(bzlLoadValue.getRecordedRepoMappings()); try (Mutability mu = - Mutability.create("module extension", usagesValue.getExtensionUniqueName())) { + Mutability.create("module extension", usagesValue.getExtensionUniqueName()); + ModuleExtensionContext moduleContext = + createContext(env, usagesValue, starlarkSemantics, extensionId)) { StarlarkThread thread = new StarlarkThread(mu, starlarkSemantics); thread.setPrintHandler(Event.makeDebugPrintHandler(env.getListener())); - moduleContext = createContext(env, usagesValue, starlarkSemantics, extensionId); threadContext.storeInThread(thread); new BazelStarlarkContext( Phase.WORKSPACE, @@ -938,39 +938,34 @@ public RunModuleExtensionResult run( moduleExtensionMetadata = Optional.empty(); } } catch (NeedsSkyframeRestartException e) { - // Clean up and restart by returning null. - try { - if (moduleContext.getWorkingDirectory().exists()) { - moduleContext.getWorkingDirectory().deleteTree(); - } - } catch (IOException e1) { - ExternalDepsException externalDepsException = - ExternalDepsException.withCauseAndMessage( - ExternalDeps.Code.EXTERNAL_DEPS_UNKNOWN, - e1, - "Failed to clean up module context directory"); - throw new SingleExtensionEvalFunctionException( - externalDepsException, Transience.TRANSIENT); - } + // Restart by returning null. 
return null; - } catch (EvalException e) { - env.getListener().handle(Event.error(e.getMessageWithStack())); - throw new SingleExtensionEvalFunctionException( - ExternalDepsException.withMessage( - ExternalDeps.Code.BAD_MODULE, - "error evaluating module extension %s in %s", - extensionId.getExtensionName(), - extensionId.getBzlFileLabel()), - Transience.TRANSIENT); } + moduleContext.markSuccessful(); + return RunModuleExtensionResult.create( + moduleContext.getRecordedFileInputs(), + moduleContext.getRecordedDirentsInputs(), + moduleContext.getRecordedEnvVarInputs(), + threadContext.getGeneratedRepoSpecs(), + moduleExtensionMetadata, + repoMappingRecorder.recordedEntries()); + } catch (EvalException e) { + env.getListener().handle(Event.error(e.getMessageWithStack())); + throw new SingleExtensionEvalFunctionException( + ExternalDepsException.withMessage( + ExternalDeps.Code.BAD_MODULE, + "error evaluating module extension %s in %s", + extensionId.getExtensionName(), + extensionId.getBzlFileLabel()), + Transience.TRANSIENT); + } catch (IOException e) { + ExternalDepsException externalDepsException = + ExternalDepsException.withCauseAndMessage( + ExternalDeps.Code.EXTERNAL_DEPS_UNKNOWN, + e, + "Failed to clean up module context directory"); + throw new SingleExtensionEvalFunctionException(externalDepsException, Transience.TRANSIENT); } - return RunModuleExtensionResult.create( - moduleContext.getRecordedFileInputs(), - moduleContext.getRecordedDirentsInputs(), - moduleContext.getRecordedEnvVarInputs(), - threadContext.getGeneratedRepoSpecs(), - moduleExtensionMetadata, - repoMappingRecorder.recordedEntries()); } private ModuleExtensionContext createContext( diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/downloader/DownloadManager.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/downloader/DownloadManager.java index 10b545ad0d1ea4..0b5064cc8dbcd6 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/downloader/DownloadManager.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/downloader/DownloadManager.java @@ -24,7 +24,6 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; -import com.google.common.util.concurrent.ThreadFactoryBuilder; import com.google.devtools.build.lib.authandtls.StaticCredentials; import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache; import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache.KeyType; @@ -48,7 +47,6 @@ import java.util.Optional; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; import java.util.concurrent.Future; import javax.annotation.Nullable; @@ -59,17 +57,6 @@ * to disk. */ public class DownloadManager { - private static final ExecutorService DOWNLOAD_EXECUTOR = - Executors.newFixedThreadPool( - // There is also GrpcRemoteDownloader so if we set the thread pool to the same size as - // the allowed number of HTTP downloads, it might unnecessarily block. No, this is not a - // very - // principled approach; ideally, we'd grow the thread pool as needed with some generous - // upper - // limit. 
- 2 * HttpDownloader.MAX_PARALLEL_DOWNLOADS, - new ThreadFactoryBuilder().setNameFormat("download-manager-%d").build()); - private final RepositoryCache repositoryCache; private List distdir = ImmutableList.of(); private UrlRewriter rewriter; @@ -115,6 +102,7 @@ public void setCredentialFactory(CredentialFactory credentialFactory) { } public Future startDownload( + ExecutorService executorService, List originalUrls, Map> headers, Map>> authHeaders, @@ -125,7 +113,7 @@ public Future startDownload( ExtendedEventHandler eventHandler, Map clientEnv, String context) { - return DOWNLOAD_EXECUTOR.submit( + return executorService.submit( () -> { try (SilentCloseable c = Profiler.instance().profile("fetching: " + context)) { return downloadInExecutor( @@ -154,33 +142,6 @@ public Path finalizeDownload(Future download) throws IOException, Interrup } } - public Path download( - List originalUrls, - Map> headers, - Map>> authHeaders, - Optional checksum, - String canonicalId, - Optional type, - Path output, - ExtendedEventHandler eventHandler, - Map clientEnv, - String context) - throws IOException, InterruptedException { - Future future = - startDownload( - originalUrls, - headers, - authHeaders, - checksum, - canonicalId, - type, - output, - eventHandler, - clientEnv, - context); - return finalizeDownload(future); - } - /** * Downloads file to disk and returns path. * diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkBaseExternalContext.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkBaseExternalContext.java index c06a92734a74d0..864a5159fc2d12 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkBaseExternalContext.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkBaseExternalContext.java @@ -39,7 +39,6 @@ import com.google.devtools.build.lib.cmdline.LabelConstants; import com.google.devtools.build.lib.cmdline.RepositoryName; import com.google.devtools.build.lib.events.Event; -import com.google.devtools.build.lib.events.EventHandler; import com.google.devtools.build.lib.events.ExtendedEventHandler.FetchProgress; import com.google.devtools.build.lib.packages.StarlarkInfo; import com.google.devtools.build.lib.packages.StructImpl; @@ -67,6 +66,7 @@ import com.google.devtools.build.lib.vfs.Symlinks; import com.google.devtools.build.skyframe.SkyFunction.Environment; import com.google.devtools.build.skyframe.SkyFunctionException.Transience; +import com.google.errorprone.annotations.ForOverride; import java.io.File; import java.io.IOException; import java.io.OutputStream; @@ -87,8 +87,8 @@ import java.util.List; import java.util.Map; import java.util.Optional; -import java.util.concurrent.CancellationException; -import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; import java.util.concurrent.Future; import javax.annotation.Nullable; import net.starlark.java.annot.Param; @@ -107,7 +107,7 @@ import net.starlark.java.syntax.Location; /** A common base class for Starlark "ctx" objects related to external dependencies. */ -public abstract class StarlarkBaseExternalContext implements StarlarkValue { +public abstract class StarlarkBaseExternalContext implements AutoCloseable, StarlarkValue { /** * An asynchronous task run as part of fetching the repository. 
@@ -145,12 +145,16 @@ private interface AsyncTask { protected final double timeoutScaling; @Nullable private final ProcessWrapper processWrapper; protected final StarlarkSemantics starlarkSemantics; + protected final String identifyingStringForLogging; private final HashMap recordedFileInputs = new HashMap<>(); private final HashMap recordedDirentsInputs = new HashMap<>(); private final HashSet accumulatedEnvKeys = new HashSet<>(); private final RepositoryRemoteExecutor remoteExecutor; private final List asyncTasks; private final boolean allowWatchingPathsOutsideWorkspace; + private final ExecutorService executorService; + + private boolean wasSuccessful = false; protected StarlarkBaseExternalContext( Path workingDirectory, @@ -161,6 +165,7 @@ protected StarlarkBaseExternalContext( double timeoutScaling, @Nullable ProcessWrapper processWrapper, StarlarkSemantics starlarkSemantics, + String identifyingStringForLogging, @Nullable RepositoryRemoteExecutor remoteExecutor, boolean allowWatchingPathsOutsideWorkspace) { this.workingDirectory = workingDirectory; @@ -172,22 +177,55 @@ protected StarlarkBaseExternalContext( this.timeoutScaling = timeoutScaling; this.processWrapper = processWrapper; this.starlarkSemantics = starlarkSemantics; + this.identifyingStringForLogging = identifyingStringForLogging; this.remoteExecutor = remoteExecutor; this.asyncTasks = new ArrayList<>(); this.allowWatchingPathsOutsideWorkspace = allowWatchingPathsOutsideWorkspace; + this.executorService = + Executors.newThreadPerTaskExecutor( + Thread.ofVirtual() + .name("downloads[" + identifyingStringForLogging + "]-", 0) + .factory()); + } + + /** + * Mark the evaluation using this context as otherwise successful. This is used to determine how + * to clean up resources in {@link #close()}. + */ + public final void markSuccessful() { + wasSuccessful = true; + } + + @Override + public final void close() throws EvalException, IOException { + // Cancel all pending async tasks. + boolean hadPendingItems = cancelPendingAsyncTasks(); + // Wait for all (cancelled) async tasks to complete before cleaning up the working directory. + // This is necessary because downloads may still be in progress and could end up writing to the + // working directory during deletion, which would cause an error. + executorService.close(); + if (shouldDeleteWorkingDirectoryOnClose(wasSuccessful)) { + workingDirectory.deleteTree(); + } + if (hadPendingItems && wasSuccessful) { + throw Starlark.errorf( + "Pending asynchronous work after %s finished execution", identifyingStringForLogging); + } } - public boolean ensureNoPendingAsyncTasks(EventHandler eventHandler, boolean forSuccessfulFetch) { + private boolean cancelPendingAsyncTasks() { boolean hadPendingItems = false; for (AsyncTask task : asyncTasks) { if (!task.cancel()) { hadPendingItems = true; - if (forSuccessfulFetch) { - eventHandler.handle( - Event.error( - task.getLocation(), - "Work pending after repository rule finished execution: " - + task.getDescription())); + if (wasSuccessful) { + env.getListener() + .handle( + Event.error( + task.getLocation(), + String.format( + "Work pending after %s finished execution: %s", + identifyingStringForLogging, task.getDescription()))); } } } @@ -197,12 +235,12 @@ public boolean ensureNoPendingAsyncTasks(EventHandler eventHandler, boolean forS // There is no unregister(). We don't have that many futures in each repository and it just // introduces the failure mode of erroneously unregistering async work that's not done. 
- protected void registerAsyncTask(AsyncTask task) { + protected final void registerAsyncTask(AsyncTask task) { asyncTasks.add(task); } - /** A string that can be used to identify this context object. Used for logging purposes. */ - protected abstract String getIdentifyingStringForLogging(); + @ForOverride + protected abstract boolean shouldDeleteWorkingDirectoryOnClose(boolean successful); /** Returns the file digests used by this context object so far. */ public ImmutableMap getRecordedFileInputs() { @@ -416,7 +454,7 @@ private Optional validateChecksum(String sha256, String integrity, Lis warnAboutChecksumError(urls, e.getMessage()); throw new RepositoryFunctionException( Starlark.errorf( - "Checksum error in %s: %s", getIdentifyingStringForLogging(), e.getMessage()), + "Checksum error in %s: %s", identifyingStringForLogging, e.getMessage()), Transience.PERSISTENT); } } @@ -430,8 +468,7 @@ private Optional validateChecksum(String sha256, String integrity, Lis } catch (Checksum.InvalidChecksumException e) { warnAboutChecksumError(urls, e.getMessage()); throw new RepositoryFunctionException( - Starlark.errorf( - "Checksum error in %s: %s", getIdentifyingStringForLogging(), e.getMessage()), + Starlark.errorf("Checksum error in %s: %s", identifyingStringForLogging, e.getMessage()), Transience.PERSISTENT); } } @@ -513,18 +550,7 @@ public Location getLocation() { @Override public boolean cancel() { - if (!future.cancel(true)) { - return true; - } - - try { - future.get(); - return false; - } catch (InterruptedException | ExecutionException | CancellationException e) { - // Ignore. The only thing we care about is that there is no async work in progress after - // this point. Any error reporting should have been done before. - return false; - } + return !future.cancel(true); } @StarlarkMethod( @@ -700,7 +726,7 @@ public Object download( sha256, integrity, executable, - getIdentifyingStringForLogging(), + identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); @@ -721,6 +747,7 @@ public Object download( if (download == null) { Future downloadFuture = downloadManager.startDownload( + executorService, urls, headers, authHeaders, @@ -730,7 +757,7 @@ public Object download( outputPath.getPath(), env.getListener(), envVariables, - getIdentifyingStringForLogging()); + identifyingStringForLogging); download = new PendingDownload( executable, @@ -904,7 +931,7 @@ public StructImpl downloadAndExtract( type, stripPrefix, renameFilesMap, - getIdentifyingStringForLogging(), + identifyingStringForLogging, thread.getCallerLocation()); StarlarkPath outputPath = getPath("download_and_extract()", output); @@ -922,6 +949,7 @@ public StructImpl downloadAndExtract( Future pendingDownload = downloadManager.startDownload( + executorService, urls, headers, authHeaders, @@ -931,7 +959,7 @@ public StructImpl downloadAndExtract( downloadDirectory, env.getListener(), envVariables, - getIdentifyingStringForLogging()); + identifyingStringForLogging); // Ensure that the download is cancelled if the repo rule is restarted as it runs in its own // executor. 
PendingDownload pendingTask = @@ -959,14 +987,14 @@ public StructImpl downloadAndExtract( } env.getListener().post(w); try (SilentCloseable c = - Profiler.instance().profile("extracting: " + getIdentifyingStringForLogging())) { + Profiler.instance().profile("extracting: " + identifyingStringForLogging)) { env.getListener() .post( new ExtractProgress( outputPath.getPath().toString(), "Extracting " + downloadedPath.getBaseName())); DecompressorValue.decompress( DecompressorDescriptor.builder() - .setContext(getIdentifyingStringForLogging()) + .setContext(identifyingStringForLogging) .setArchivePath(downloadedPath) .setDestinationPath(outputPath.getPath()) .setPrefix(stripPrefix) @@ -1073,7 +1101,7 @@ public void createFile( p.toString(), content, executable, - getIdentifyingStringForLogging(), + identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); try { @@ -1203,7 +1231,7 @@ public String readFile(Object path, String watch, StarlarkThread thread) StarlarkPath p = getPath("read()", path); WorkspaceRuleEvent w = WorkspaceRuleEvent.newReadEvent( - p.toString(), getIdentifyingStringForLogging(), thread.getCallerLocation()); + p.toString(), identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); maybeWatch(p, ShouldWatch.fromString(watch)); if (p.isDir()) { @@ -1380,7 +1408,7 @@ public void reportProgress(String status) { new FetchProgress() { @Override public String getResourceIdentifier() { - return getIdentifyingStringForLogging(); + return identifyingStringForLogging; } @Override @@ -1405,7 +1433,7 @@ public StarlarkOS getOS() { // manually inspect the code where this context object is used if they wish to find the // offending ctx.os expression. WorkspaceRuleEvent w = - WorkspaceRuleEvent.newOsEvent(getIdentifyingStringForLogging(), Location.BUILTIN); + WorkspaceRuleEvent.newOsEvent(identifyingStringForLogging, Location.BUILTIN); env.getListener().post(w); return osObject; } @@ -1614,7 +1642,7 @@ public StarlarkExecutionResult execute( forceEnvVariables, workingDirectory.getPathString(), quiet, - getIdentifyingStringForLogging(), + identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); createDirectory(workingDirectory); @@ -1664,7 +1692,7 @@ public StarlarkExecutionResult execute( public StarlarkPath which(String program, StarlarkThread thread) throws EvalException { WorkspaceRuleEvent w = WorkspaceRuleEvent.newWhichEvent( - program, getIdentifyingStringForLogging(), thread.getCallerLocation()); + program, identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); if (program.contains("/") || program.contains("\\")) { throw Starlark.errorf( diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java index ad28d3119926d3..7a7590259f644c 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryContext.java @@ -77,7 +77,6 @@ + " repository rule.") public class StarlarkRepositoryContext extends StarlarkBaseExternalContext { private final Rule rule; - private final RepositoryName repoName; private final PathPackageLocator packageLocator; private final StructImpl attrObject; private final ImmutableSet ignoredPatterns; @@ -112,10 +111,11 @@ public class StarlarkRepositoryContext 
extends StarlarkBaseExternalContext { timeoutScaling, processWrapper, starlarkSemantics, + RepositoryFetchProgress.repositoryFetchContextString( + RepositoryName.createUnvalidated(rule.getName())), remoteExecutor, /* allowWatchingPathsOutsideWorkspace= */ true); this.rule = rule; - this.repoName = RepositoryName.createUnvalidated(rule.getName()); this.packageLocator = packageLocator; this.ignoredPatterns = ignoredPatterns; this.syscallCache = syscallCache; @@ -132,8 +132,8 @@ public class StarlarkRepositoryContext extends StarlarkBaseExternalContext { } @Override - protected String getIdentifyingStringForLogging() { - return RepositoryFetchProgress.repositoryFetchContextString(repoName); + protected boolean shouldDeleteWorkingDirectoryOnClose(boolean successful) { + return !successful; } public ImmutableMap getRecordedDirTreeInputs() { @@ -217,7 +217,7 @@ public void symlink(Object target, Object linkName, StarlarkThread thread) WorkspaceRuleEvent.newSymlinkEvent( targetPath.toString(), linkPath.toString(), - getIdentifyingStringForLogging(), + identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); try { @@ -309,7 +309,7 @@ public void createFileFromTemplate( t.toString(), substitutionMap, executable, - getIdentifyingStringForLogging(), + identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); if (t.isDir()) { @@ -374,7 +374,7 @@ public boolean delete(Object pathObject, StarlarkThread thread) StarlarkPath starlarkPath = externalPath("delete()", pathObject); WorkspaceRuleEvent w = WorkspaceRuleEvent.newDeleteEvent( - starlarkPath.toString(), getIdentifyingStringForLogging(), thread.getCallerLocation()); + starlarkPath.toString(), identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); try { Path path = starlarkPath.getPath(); @@ -432,7 +432,7 @@ public void patch(Object patchFile, StarlarkInt stripI, String watchPatch, Starl WorkspaceRuleEvent.newPatchEvent( starlarkPath.toString(), strip, - getIdentifyingStringForLogging(), + identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); if (starlarkPath.isDir()) { @@ -543,7 +543,7 @@ public void extract( output.toString(), stripPrefix, renameFilesMap, - getIdentifyingStringForLogging(), + identifyingStringForLogging, thread.getCallerLocation()); env.getListener().post(w); @@ -553,7 +553,7 @@ public void extract( outputPath.getPath().toString(), "Extracting " + archivePath.getBasename())); DecompressorValue.decompress( DecompressorDescriptor.builder() - .setContext(getIdentifyingStringForLogging()) + .setContext(identifyingStringForLogging) .setArchivePath(archivePath.getPath()) .setDestinationPath(outputPath.getPath()) .setPrefix(stripPrefix) diff --git a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryFunction.java b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryFunction.java index a20c56df9e546a..042f3003eb7e33 100644 --- a/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryFunction.java +++ b/src/main/java/com/google/devtools/build/lib/bazel/repository/starlark/StarlarkRepositoryFunction.java @@ -33,9 +33,9 @@ import com.google.devtools.build.lib.cmdline.LabelConstants; import com.google.devtools.build.lib.cmdline.RepositoryMapping; import com.google.devtools.build.lib.cmdline.RepositoryName; +import com.google.devtools.build.lib.cmdline.SymbolGenerator; import com.google.devtools.build.lib.events.Event; import 
com.google.devtools.build.lib.packages.Rule; -import com.google.devtools.build.lib.cmdline.SymbolGenerator; import com.google.devtools.build.lib.packages.semantics.BuildLanguageOptions; import com.google.devtools.build.lib.pkgcache.PathPackageLocator; import com.google.devtools.build.lib.profiler.Profiler; @@ -260,7 +260,22 @@ private RepositoryDirectoryValue.Builder fetchInternal( } ImmutableSet ignoredPatterns = checkNotNull(ignoredPackagesValue).getPatterns(); - try (Mutability mu = Mutability.create("Starlark repository")) { + try (Mutability mu = Mutability.create("Starlark repository"); + StarlarkRepositoryContext starlarkRepositoryContext = + new StarlarkRepositoryContext( + rule, + packageLocator, + outputDirectory, + ignoredPatterns, + env, + ImmutableMap.copyOf(clientEnvironment), + downloadManager, + timeoutScaling, + processWrapper, + starlarkSemantics, + repositoryRemoteExecutor, + syscallCache, + directories)) { StarlarkThread thread = new StarlarkThread(mu, starlarkSemantics); thread.setPrintHandler(Event.makeDebugPrintHandler(env.getListener())); var repoMappingRecorder = new Label.RepoMappingRecorder(); @@ -279,22 +294,6 @@ private RepositoryDirectoryValue.Builder fetchInternal( () -> mainRepoMapping) .storeInThread(thread); - StarlarkRepositoryContext starlarkRepositoryContext = - new StarlarkRepositoryContext( - rule, - packageLocator, - outputDirectory, - ignoredPatterns, - env, - ImmutableMap.copyOf(clientEnvironment), - downloadManager, - timeoutScaling, - processWrapper, - starlarkSemantics, - repositoryRemoteExecutor, - syscallCache, - directories); - if (starlarkRepositoryContext.isRemotable()) { // If a rule is declared remotable then invalidate it if remote execution gets // enabled or disabled. @@ -318,7 +317,6 @@ private RepositoryDirectoryValue.Builder fetchInternal( // it possible to return null and not block but it doesn't seem to be easy with Starlark // structure as it is. 
Object result; - boolean fetchSuccessful = false; try (SilentCloseable c = Profiler.instance() .profile(ProfilerTask.STARLARK_REPOSITORY_FN, () -> rule.getLabel().toString())) { @@ -326,19 +324,9 @@ private RepositoryDirectoryValue.Builder fetchInternal( Starlark.call( thread, function, - /*args=*/ ImmutableList.of(starlarkRepositoryContext), - /*kwargs=*/ ImmutableMap.of()); - fetchSuccessful = true; - } finally { - if (starlarkRepositoryContext.ensureNoPendingAsyncTasks( - env.getListener(), fetchSuccessful)) { - if (fetchSuccessful) { - throw new RepositoryFunctionException( - new EvalException( - "Pending asynchronous work after repository rule finished running"), - Transience.PERSISTENT); - } - } + /* args= */ ImmutableList.of(starlarkRepositoryContext), + /* kwargs= */ ImmutableMap.of()); + starlarkRepositoryContext.markSuccessful(); } RepositoryResolvedEvent resolved = @@ -368,14 +356,6 @@ private RepositoryDirectoryValue.Builder fetchInternal( env.getListener().post(resolved); } catch (NeedsSkyframeRestartException e) { - // A dependency is missing, cleanup and returns null - try { - if (outputDirectory.exists()) { - outputDirectory.deleteTree(); - } - } catch (IOException e1) { - throw new RepositoryFunctionException(e1, Transience.TRANSIENT); - } return null; } catch (EvalException e) { env.getListener() @@ -390,6 +370,8 @@ private RepositoryDirectoryValue.Builder fetchInternal( throw new RepositoryFunctionException( new AlreadyReportedRepositoryAccessException(e), Transience.TRANSIENT); + } catch (IOException e) { + throw new RepositoryFunctionException(e, Transience.TRANSIENT); } if (!outputDirectory.isDirectory()) { diff --git a/src/test/java/com/google/devtools/build/lib/bazel/repository/downloader/HttpDownloaderTest.java b/src/test/java/com/google/devtools/build/lib/bazel/repository/downloader/HttpDownloaderTest.java index 9d8199a5274e13..84a862ea85a04e 100644 --- a/src/test/java/com/google/devtools/build/lib/bazel/repository/downloader/HttpDownloaderTest.java +++ b/src/test/java/com/google/devtools/build/lib/bazel/repository/downloader/HttpDownloaderTest.java @@ -43,10 +43,12 @@ import java.net.ServerSocket; import java.net.Socket; import java.net.SocketTimeoutException; +import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Map; import java.util.Optional; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -114,7 +116,8 @@ public void downloadFrom1UrlOk() throws IOException, InterruptedException { }); Path resultingFile = - downloadManager.download( + download( + downloadManager, Collections.singletonList( new URL(String.format("http://localhost:%d/foo", server.getLocalPort()))), Collections.emptyMap(), @@ -180,7 +183,8 @@ public void downloadFrom2UrlsFirstOk() throws IOException, InterruptedException urls.add(new URL(String.format("http://localhost:%d/foo", server2.getLocalPort()))); Path resultingFile = - downloadManager.download( + download( + downloadManager, urls, Collections.emptyMap(), Collections.emptyMap(), @@ -248,7 +252,8 @@ public void downloadFrom2UrlsFirstSocketTimeoutOnBodyReadSecondOk() urls.add(new URL(String.format("http://localhost:%d/foo", server2.getLocalPort()))); Path resultingFile = - downloadManager.download( + download( + downloadManager, urls, Collections.emptyMap(), Collections.emptyMap(), @@ -318,7 +323,8 @@ public void downloadFrom2UrlsBothSocketTimeoutDuringBodyRead() Path outputFile = 
fs.getPath(workingDir.newFile().getAbsolutePath()); try { - downloadManager.download( + download( + downloadManager, urls, Collections.emptyMap(), Collections.emptyMap(), @@ -657,7 +663,8 @@ public void download_contentLengthMismatch_propagateErrorIfNotRetry() throws Exc assertThrows( ContentLengthMismatchException.class, () -> - downloadManager.download( + download( + downloadManager, ImmutableList.of(new URL("http://localhost")), Collections.emptyMap(), ImmutableMap.of(), @@ -697,7 +704,8 @@ public void download_contentLengthMismatch_retries() throws Exception { .download(any(), any(), any(), any(), any(), any(), any(), any(), any()); Path result = - downloadManager.download( + download( + downloadManager, ImmutableList.of(new URL("http://localhost")), ImmutableMap.of(), ImmutableMap.of(), @@ -742,7 +750,8 @@ public void download_contentLengthMismatchWithOtherErrors_retries() throws Excep .download(any(), any(), any(), any(), any(), any(), any(), any(), any()); Path result = - downloadManager.download( + download( + downloadManager, ImmutableList.of(new URL("http://localhost")), ImmutableMap.of(), ImmutableMap.of(), @@ -758,4 +767,35 @@ public void download_contentLengthMismatchWithOtherErrors_retries() throws Excep String content = new String(result.getInputStream().readAllBytes(), UTF_8); assertThat(content).isEqualTo("content"); } + + public Path download( + DownloadManager downloadManager, + List originalUrls, + Map> headers, + Map>> authHeaders, + Optional checksum, + String canonicalId, + Optional type, + Path output, + ExtendedEventHandler eventHandler, + Map clientEnv, + String context) + throws IOException, InterruptedException { + try (ExecutorService executorService = Executors.newVirtualThreadPerTaskExecutor()) { + Future future = + downloadManager.startDownload( + executorService, + originalUrls, + headers, + authHeaders, + checksum, + canonicalId, + type, + output, + eventHandler, + clientEnv, + context); + return downloadManager.finalizeDownload(future); + } + } } diff --git a/src/test/py/bazel/bzlmod/bazel_module_test.py b/src/test/py/bazel/bzlmod/bazel_module_test.py index 436d23881e3eb5..1634454f089ae6 100644 --- a/src/test/py/bazel/bzlmod/bazel_module_test.py +++ b/src/test/py/bazel/bzlmod/bazel_module_test.py @@ -1030,6 +1030,39 @@ def testLabelDebugPrint(self): 'rule //:foo @other_module//:bar @@canonical_name//:baz', stderr ) + def testPendingDownloadDetected(self): + self.ScratchFile( + 'MODULE.bazel', + [ + 'ext = use_extension("extensions.bzl", "ext")', + 'use_repo(ext, "ext")', + ], + ) + self.ScratchFile('BUILD') + self.ScratchFile( + 'extensions.bzl', + [ + 'repo_rule = repository_rule(lambda _: None)', + 'def ext_impl(module_ctx):', + ' repo_rule(name = "ext")', + ( + ' module_ctx.download(url = "https://bcr.bazel.build", output' + ' = "download", block = False)' + ), + 'ext = module_extension(implementation = ext_impl)', + ], + ) + exit_code, _, stderr = self.RunBazel( + ['build', '--nobuild', '@ext//:all'], + allow_failure=True, + ) + self.AssertExitCode(exit_code, 48, stderr) + self.assertIn( + 'ERROR: Pending asynchronous work after module extension ext in' + ' @@//:extensions.bzl finished execution', + stderr, + ) + if __name__ == '__main__': absltest.main() diff --git a/src/test/shell/bazel/jdeps_test.sh b/src/test/shell/bazel/jdeps_test.sh index 2776ebee9c4c88..ec58c651d3a114 100755 --- a/src/test/shell/bazel/jdeps_test.sh +++ b/src/test/shell/bazel/jdeps_test.sh @@ -72,7 +72,8 @@ function test_jdeps() { # src/test/shell/bazel/jdeps_class_denylist.txt. 
   find . -type f -iname \*class | \
     grep -vFf "$denylist" | \
-    xargs -s 262144 ../jdk/reduced/bin/jdeps --list-reduced-deps --ignore-missing-deps | \
+    sort -r | \
+    xargs -s 400000 ../jdk/reduced/bin/jdeps --list-reduced-deps --ignore-missing-deps | \
     grep -v "unnamed module" > ../jdeps \
     || fail "Failed to run jdeps on non denylisted class files."
   cd ..
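
The resource-management pattern this change adopts, in rough outline: each Starlark fetch context now owns its own virtual-thread-per-task download executor (replacing the shared fixed pool in DownloadManager), implements AutoCloseable, and is created in a try-with-resources block; close() cancels still-pending downloads, waits for them to terminate, deletes the working directory when the subclass asks for it, and flags leftover async work as an error only after an otherwise successful run. The sketch below is a minimal stand-in for that pattern, not the Bazel code itself: SketchContext, startDownload, and runFetch are hypothetical names, and the real classes thread this behaviour through StarlarkBaseExternalContext and its subclasses.

// Illustrative sketch only; SketchContext and runFetch are hypothetical simplifications,
// not the classes in this diff.
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

final class SketchContext implements AutoCloseable {
  private final Path workingDirectory;
  // One executor per context (instead of a process-wide fixed pool), so close() can wait
  // for exactly this context's downloads before touching the working directory.
  private final ExecutorService executor =
      Executors.newThreadPerTaskExecutor(Thread.ofVirtual().name("downloads-", 0).factory());
  private final List<Future<?>> pendingDownloads = new ArrayList<>();
  private boolean wasSuccessful = false;

  SketchContext(Path workingDirectory) {
    this.workingDirectory = workingDirectory;
  }

  Future<?> startDownload(Runnable download) {
    Future<?> future = executor.submit(download);
    pendingDownloads.add(future);
    return future;
  }

  // Called by the evaluation loop right before leaving the try block on the happy path.
  void markSuccessful() {
    wasSuccessful = true;
  }

  @Override
  public void close() throws IOException {
    boolean hadPendingWork = false;
    for (Future<?> future : pendingDownloads) {
      // Future.cancel(true) returns true only if the task had not finished yet.
      hadPendingWork |= future.cancel(/* mayInterruptIfRunning= */ true);
    }
    // Wait for all (cancelled) downloads to terminate so none of them writes into the
    // working directory while it is being deleted below.
    executor.close();
    if (!wasSuccessful) {
      // The repo-rule variant keeps the directory on success; the module-extension variant
      // always deletes it, since its working directory is purely ephemeral.
      deleteRecursively(workingDirectory);
    }
    if (hadPendingWork && wasSuccessful) {
      throw new IllegalStateException("Pending asynchronous work after evaluation finished");
    }
  }

  private static void deleteRecursively(Path dir) throws IOException {
    if (!Files.exists(dir)) {
      return;
    }
    try (var paths = Files.walk(dir)) {
      for (Path p : paths.sorted(Comparator.reverseOrder()).toList()) {
        Files.delete(p);
      }
    }
  }

  // Hypothetical caller, mirroring the try-with-resources shape now used by
  // SingleExtensionEvalFunction and StarlarkRepositoryFunction: close() runs on every exit
  // path, including the NeedsSkyframeRestartException restart that previously cleaned up
  // the output directory by hand.
  static void runFetch(Path workDir) throws Exception {
    try (SketchContext ctx = new SketchContext(workDir)) {
      ctx.startDownload(() -> { /* fetch a file into workDir */ });
      // ... evaluate the repository rule or module extension body ...
      ctx.markSuccessful(); // reached only if the evaluation did not throw
    }
  }
}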