Remove unused Git resolver methods (#10754)

Charlie Marsh 2025-01-19 12:02:53 -05:00 committed by GitHub
parent c306e46e1d
commit 5e5cb8797d
GPG key ID: B5690EEEBB952194
2 changed files with 0 additions and 106 deletions


@@ -38,50 +38,6 @@ impl GitResolver {
        self.0.get(reference)
    }

    /// Resolve a Git URL to a specific commit.
    pub async fn resolve(
        &self,
        url: &GitUrl,
        client: ClientWithMiddleware,
        cache: PathBuf,
        reporter: Option<Arc<dyn Reporter>>,
    ) -> Result<GitSha, GitResolverError> {
        debug!("Resolving source distribution from Git: {url}");
        let reference = RepositoryReference::from(url);

        // If we know the precise commit already, return it.
        if let Some(precise) = self.get(&reference) {
            return Ok(*precise);
        }

        // Avoid races between different processes, too.
        let lock_dir = cache.join("locks");
        fs::create_dir_all(&lock_dir).await?;
        let repository_url = RepositoryUrl::new(url.repository());
        let _lock = LockedFile::acquire(
            lock_dir.join(cache_digest(&repository_url)),
            &repository_url,
        )
        .await?;

        // Fetch the Git repository.
        let source = if let Some(reporter) = reporter {
            GitSource::new(url.clone(), client, cache).with_reporter(reporter)
        } else {
            GitSource::new(url.clone(), client, cache)
        };
        let precise = tokio::task::spawn_blocking(move || source.resolve())
            .await?
            .map_err(GitResolverError::Git)?;

        // Insert the resolved URL into the in-memory cache. This ensures that subsequent fetches
        // resolve to the same precise commit.
        self.insert(reference, precise);

        Ok(precise)
    }

    /// Fetch a remote Git repository.
    pub async fn fetch(
        &self,

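The removed GitResolver::resolve above follows a "check the in-memory cache, otherwise fetch under a per-repository lock, then record the result" pattern, running the blocking Git work off the async runtime via tokio::task::spawn_blocking. Below is a minimal, self-contained sketch of that caching pattern, assuming the tokio crate; the Resolver type and fetch_precise_sha function are hypothetical placeholders, not the uv-git API, and only the control flow mirrors the removed code.

// Sketch only: Resolver and fetch_precise_sha are hypothetical stand-ins, not uv-git types.
use std::collections::HashMap;
use std::sync::Mutex;

/// Stand-in for the real blocking Git fetch (cf. `GitSource::resolve` in the second hunk).
fn fetch_precise_sha(repository: &str) -> Result<String, String> {
    // The real code talks to Git; here we just fabricate a stable "SHA" for illustration.
    Ok(format!("deadbeef-for-{repository}"))
}

#[derive(Default)]
struct Resolver {
    // Repository URL -> precise commit SHA, shared across resolutions.
    cache: Mutex<HashMap<String, String>>,
}

impl Resolver {
    async fn resolve(&self, repository: &str) -> Result<String, String> {
        // Fast path: if we already know the precise commit, return it.
        if let Some(sha) = self.cache.lock().unwrap().get(repository) {
            return Ok(sha.clone());
        }

        // Slow path: run the blocking fetch off the async runtime, as the removed
        // method did with `tokio::task::spawn_blocking`.
        let repo = repository.to_owned();
        let sha = tokio::task::spawn_blocking(move || fetch_precise_sha(&repo))
            .await
            .map_err(|err| err.to_string())??;

        // Remember the result so later resolutions of the same URL pin to the same commit.
        self.cache
            .lock()
            .unwrap()
            .insert(repository.to_owned(), sha.clone());
        Ok(sha)
    }
}

#[tokio::main]
async fn main() {
    let resolver = Resolver::default();
    let sha = resolver.resolve("https://example.com/repo.git").await.unwrap();
    println!("resolved to {sha}");
}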

@@ -52,68 +52,6 @@ impl GitSource {
        }
    }

    /// Resolve a Git source to a specific revision.
    #[instrument(skip(self), fields(repository = %self.git.repository, rev = ?self.git.precise))]
    pub fn resolve(self) -> Result<GitSha> {
        // Compute the canonical URL for the repository.
        let canonical = RepositoryUrl::new(&self.git.repository);

        // The path to the repo, within the Git database.
        let ident = cache_digest(&canonical);
        let db_path = self.cache.join("db").join(&ident);

        // Authenticate the URL, if necessary.
        let remote = if let Some(credentials) = GIT_STORE.get(&canonical) {
            Cow::Owned(credentials.apply(self.git.repository.clone()))
        } else {
            Cow::Borrowed(&self.git.repository)
        };
        let remote = GitRemote::new(&remote);

        let (db, actual_rev, task) = match (self.git.precise, remote.db_at(&db_path).ok()) {
            // If we have a locked revision, and we have a preexisting database
            // which has that revision, then no update needs to happen.
            (Some(rev), Some(db)) if db.contains(rev.into()) => {
                debug!("Using existing Git source `{}`", self.git.repository);
                (db, rev, None)
            }

            // ... otherwise we use this state to update the git database. Note
            // that we still check for being offline here, for example in the
            // situation that we have a locked revision but the database
            // doesn't have it.
            (locked_rev, db) => {
                debug!("Updating Git source `{}`", self.git.repository);

                // Report the checkout operation to the reporter.
                let task = self.reporter.as_ref().map(|reporter| {
                    reporter.on_checkout_start(remote.url(), self.git.reference.as_rev())
                });

                let (db, actual_rev) = remote.checkout(
                    &db_path,
                    db,
                    &self.git.reference,
                    locked_rev.map(GitOid::from),
                    &self.client,
                )?;

                (db, GitSha::from(actual_rev), task)
            }
        };

        let short_id = db.to_short_id(actual_rev.into())?;

        // Report the checkout operation to the reporter.
        if let Some(task) = task {
            if let Some(reporter) = self.reporter.as_ref() {
                reporter.on_checkout_complete(remote.url(), short_id.as_str(), task);
            }
        }

        Ok(actual_rev)
    }

    /// Fetch the underlying Git repository at the given revision.
    #[instrument(skip(self), fields(repository = %self.git.repository, rev = ?self.git.precise))]
    pub fn fetch(self) -> Result<Fetch> {
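The removed GitSource::resolve above boils down to a two-way decision: if a locked revision is already present in the on-disk Git database, reuse it without touching the network; otherwise update the database from the remote and resolve the requested reference. A rough sketch of that decision is shown below, shelling out to the git CLI via std::process::Command instead of using uv's internal GitRemote and GitDatabase types; db_contains and resolve_rev are hypothetical names, and this illustrates only the control flow, not the real implementation (which also handles authentication, offline mode, and progress reporting).

// Sketch only: db_contains and resolve_rev are hypothetical helpers, not uv-git functions.
use std::path::Path;
use std::process::Command;

/// Returns true if the bare repository at `db_path` already contains `rev` as a commit.
fn db_contains(db_path: &Path, rev: &str) -> bool {
    Command::new("git")
        .arg("--git-dir")
        .arg(db_path)
        .args(["cat-file", "-e"])
        .arg(format!("{rev}^{{commit}}"))
        .status()
        .map(|status| status.success())
        .unwrap_or(false)
}

/// Resolve `reference` (a branch, tag, or SHA) to a full commit SHA, fetching only if needed.
fn resolve_rev(
    db_path: &Path,
    remote: &str,
    reference: &str,
    locked: Option<&str>,
) -> Result<String, String> {
    // Fast path: a locked revision that the local database already has needs no network access.
    if let Some(rev) = locked {
        if db_contains(db_path, rev) {
            return Ok(rev.to_owned());
        }
    }

    // Slow path: make sure the bare database exists, then update it from the remote.
    if !db_path.exists() {
        Command::new("git")
            .args(["init", "--bare"])
            .arg(db_path)
            .status()
            .map_err(|err| err.to_string())?;
    }
    let status = Command::new("git")
        .arg("--git-dir")
        .arg(db_path)
        .args(["fetch", "--force", remote, reference])
        .status()
        .map_err(|err| err.to_string())?;
    if !status.success() {
        return Err(format!("`git fetch` failed for {remote}"));
    }

    // Resolve whatever the fetch brought in to a full commit SHA.
    let output = Command::new("git")
        .arg("--git-dir")
        .arg(db_path)
        .args(["rev-parse", "FETCH_HEAD"])
        .output()
        .map_err(|err| err.to_string())?;
    if !output.status.success() {
        return Err("`git rev-parse FETCH_HEAD` failed".to_owned());
    }
    Ok(String::from_utf8_lossy(&output.stdout).trim().to_owned())
}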