1use std::collections::BTreeMap;
2use std::collections::BTreeSet;
3use std::collections::HashMap;
4use std::fs::{self, File};
5use std::io::prelude::*;
6use std::io::SeekFrom;
7use std::path::{Path, PathBuf};
8use std::task::Poll;
9
10use crate::core::dependency::DepKind;
11use crate::core::manifest::Target;
12use crate::core::resolver::CliFeatures;
13use crate::core::resolver::HasDevUnits;
14use crate::core::PackageIdSpecQuery;
15use crate::core::Shell;
16use crate::core::Verbosity;
17use crate::core::Workspace;
18use crate::core::{Package, PackageId, PackageSet, Resolve, SourceId};
19use crate::ops::lockfile::LOCKFILE_NAME;
20use crate::ops::registry::{infer_registry, RegistryOrIndex};
21use crate::sources::path::PathEntry;
22use crate::sources::registry::index::{IndexPackage, RegistryDependency};
23use crate::sources::{PathSource, CRATES_IO_REGISTRY};
24use crate::util::cache_lock::CacheLockMode;
25use crate::util::context::JobsConfig;
26use crate::util::errors::CargoResult;
27use crate::util::restricted_names;
28use crate::util::toml::prepare_for_publish;
29use crate::util::FileLock;
30use crate::util::Filesystem;
31use crate::util::GlobalContext;
32use crate::util::Graph;
33use crate::util::HumanBytes;
34use crate::{drop_println, ops};
35use anyhow::{bail, Context as _};
36use cargo_util::paths;
37use cargo_util_schemas::messages;
38use flate2::{Compression, GzBuilder};
39use tar::{Builder, EntryType, Header, HeaderMode};
40use tracing::debug;
41use unicase::Ascii as UncasedAscii;
42
43mod vcs;
44mod verify;
45
/// Output format for `cargo package --list`.
#[derive(Debug, Clone)]
pub enum PackageMessageFormat {
    /// Plain text, one file path per line.
    Human,
    /// Machine-readable JSON (printed via `print_json`).
    Json,
}
54
impl PackageMessageFormat {
    /// All values accepted for the message-format option.
    pub const POSSIBLE_VALUES: [&str; 2] = ["human", "json"];

    /// Format used when no message-format option is given.
    pub const DEFAULT: &str = "human";
}
60
61impl std::str::FromStr for PackageMessageFormat {
62 type Err = anyhow::Error;
63
64 fn from_str(s: &str) -> Result<PackageMessageFormat, anyhow::Error> {
65 match s {
66 "human" => Ok(PackageMessageFormat::Human),
67 "json" => Ok(PackageMessageFormat::Json),
68 f => bail!("unknown message format `{f}`"),
69 }
70 }
71}
72
/// Options for the `cargo package` operation.
#[derive(Clone)]
pub struct PackageOpts<'gctx> {
    pub gctx: &'gctx GlobalContext,
    /// Only list the files that would be packaged instead of building tarballs.
    pub list: bool,
    /// Output format used when `list` is set.
    pub fmt: PackageMessageFormat,
    /// Warn about missing recommended manifest metadata (see `check_metadata`).
    pub check_metadata: bool,
    /// Allow packaging with uncommitted VCS changes.
    pub allow_dirty: bool,
    /// Include a generated `Cargo.lock` in the tarball.
    pub include_lockfile: bool,
    /// Build the packaged crate afterwards to verify it.
    pub verify: bool,
    pub jobs: Option<JobsConfig>,
    pub keep_going: bool,
    /// Which workspace packages to package.
    pub to_package: ops::Packages,
    pub targets: Vec<String>,
    pub cli_features: CliFeatures,
    /// Registry (or index URL) the packages are destined for, if given.
    pub reg_or_index: Option<ops::RegistryOrIndex>,
}
89
/// Archive name under which the original (pre-normalization) manifest is kept.
const ORIGINAL_MANIFEST_FILE: &str = "Cargo.toml.orig";
/// Name of the generated file recording the VCS state at packaging time.
const VCS_INFO_FILE: &str = ".cargo_vcs_info.json";
92
/// A single file destined for the package archive.
struct ArchiveFile {
    /// Path of the file within the archive, relative to the package root.
    rel_path: PathBuf,
    /// `rel_path` as a string (paths are validated to be UTF-8 beforehand).
    rel_str: String,
    /// Where the file's bytes come from.
    contents: FileContents,
}
102
/// Source of an archive entry's contents.
enum FileContents {
    /// Copied verbatim from this path on disk.
    OnDisk(PathBuf),
    /// Generated by cargo while packaging.
    Generated(GeneratedFile),
}
109
/// Kinds of files cargo generates into the archive.
enum GeneratedFile {
    /// Normalized `Cargo.toml`, derived from the manifest at this path.
    Manifest(PathBuf),
    /// Generated `Cargo.lock`; `Some(path)` when a workspace lockfile
    /// already existed on disk, `None` otherwise.
    Lockfile(Option<PathBuf>),
    /// Contents for the generated `.cargo_vcs_info.json`.
    VcsInfo(vcs::VcsInfo),
}
122
123#[tracing::instrument(skip_all)]
125fn create_package(
126 ws: &Workspace<'_>,
127 pkg: &Package,
128 ar_files: Vec<ArchiveFile>,
129 local_reg: Option<&TmpRegistry<'_>>,
130) -> CargoResult<FileLock> {
131 let gctx = ws.gctx();
132 let filecount = ar_files.len();
133
134 for dep in pkg.dependencies() {
136 super::check_dep_has_version(dep, false)?;
137 }
138
139 let filename = pkg.package_id().tarball_name();
140 let dir = ws.target_dir().join("package");
141 let mut dst = {
142 let tmp = format!(".{}", filename);
143 dir.open_rw_exclusive_create(&tmp, gctx, "package scratch space")?
144 };
145
146 gctx.shell()
151 .status("Packaging", pkg.package_id().to_string())?;
152 dst.file().set_len(0)?;
153 let uncompressed_size = tar(ws, pkg, local_reg, ar_files, dst.file(), &filename)
154 .context("failed to prepare local package for uploading")?;
155
156 dst.seek(SeekFrom::Start(0))?;
157 let src_path = dst.path();
158 let dst_path = dst.parent().join(&filename);
159 fs::rename(&src_path, &dst_path)
160 .context("failed to move temporary tarball into final location")?;
161
162 let dst_metadata = dst
163 .file()
164 .metadata()
165 .with_context(|| format!("could not learn metadata for: `{}`", dst_path.display()))?;
166 let compressed_size = dst_metadata.len();
167
168 let uncompressed = HumanBytes(uncompressed_size);
169 let compressed = HumanBytes(compressed_size);
170
171 let message = format!("{filecount} files, {uncompressed:.1} ({compressed:.1} compressed)");
172 drop(gctx.shell().status("Packaged", message));
174
175 return Ok(dst);
176}
177
178pub fn package(ws: &Workspace<'_>, opts: &PackageOpts<'_>) -> CargoResult<Vec<FileLock>> {
183 let specs = &opts.to_package.to_package_id_specs(ws)?;
184 if let ops::Packages::Packages(_) = opts.to_package {
186 for spec in specs.iter() {
187 let member_ids = ws.members().map(|p| p.package_id());
188 spec.query(member_ids)?;
189 }
190 }
191 let mut pkgs = ws.members_with_features(specs, &opts.cli_features)?;
192
193 pkgs.retain(|(pkg, _feats)| specs.iter().any(|spec| spec.matches(pkg.package_id())));
196
197 Ok(do_package(ws, opts, pkgs)?
198 .into_iter()
199 .map(|x| x.2)
200 .collect())
201}
202
203pub(crate) fn package_with_dep_graph(
209 ws: &Workspace<'_>,
210 opts: &PackageOpts<'_>,
211 pkgs: Vec<(&Package, CliFeatures)>,
212) -> CargoResult<LocalDependencies<(CliFeatures, FileLock)>> {
213 let output = do_package(ws, opts, pkgs)?;
214
215 Ok(local_deps(output.into_iter().map(
216 |(pkg, opts, tarball)| (pkg, (opts.cli_features, tarball)),
217 )))
218}
219
/// Packages each of `pkgs` in topologically sorted order, returning each
/// package paired with the options used and the resulting tarball lock.
fn do_package<'a>(
    ws: &Workspace<'_>,
    opts: &PackageOpts<'a>,
    pkgs: Vec<(&Package, CliFeatures)>,
) -> CargoResult<Vec<(Package, PackageOpts<'a>, FileLock)>> {
    // If a lockfile will be embedded and one already exists, make sure the
    // workspace resolve is up to date first.
    if ws
        .lock_root()
        .as_path_unlocked()
        .join(LOCKFILE_NAME)
        .exists()
        && opts.include_lockfile
    {
        let dry_run = false;
        let _ = ops::resolve_ws(ws, dry_run)?;
    }

    let deps = local_deps(pkgs.iter().map(|(p, f)| ((*p).clone(), f.clone())));
    let just_pkgs: Vec<_> = pkgs.iter().map(|p| p.0).collect();

    // With `-Zpackage-workspace`, a temporary local registry lets packages
    // resolve against their not-yet-published local dependencies. It is only
    // set up when it will actually be needed (lockfile/verify with local
    // deps, or an explicit registry).
    let mut local_reg = if ws.gctx().cli_unstable().package_workspace {
        let sid = if (deps.has_dependencies() && (opts.include_lockfile || opts.verify))
            || opts.reg_or_index.is_some()
        {
            let sid = get_registry(ws.gctx(), &just_pkgs, opts.reg_or_index.clone())?;
            debug!("packaging for registry {}", sid);
            Some(sid)
        } else {
            None
        };
        let reg_dir = ws.build_dir().join("package").join("tmp-registry");
        sid.map(|sid| TmpRegistry::new(ws.gctx(), reg_dir, sid))
            .transpose()?
    } else {
        None
    };

    // Process packages in graph-sorted order so local dependencies are
    // available in the temporary registry when their dependents need them.
    let sorted_pkgs = deps.sort();
    let mut outputs: Vec<(Package, PackageOpts<'_>, FileLock)> = Vec::new();
    for (pkg, cli_features) in sorted_pkgs {
        let opts = PackageOpts {
            cli_features: cli_features.clone(),
            to_package: ops::Packages::Default,
            ..opts.clone()
        };
        let ar_files = prepare_archive(ws, &pkg, &opts)?;

        if opts.list {
            match opts.fmt {
                PackageMessageFormat::Human => {
                    for ar_file in &ar_files {
                        drop_println!(ws.gctx(), "{}", ar_file.rel_str);
                    }
                }
                PackageMessageFormat::Json => {
                    let message = messages::PackageList {
                        id: pkg.package_id().to_spec(),
                        files: BTreeMap::from_iter(ar_files.into_iter().map(|f| {
                            let file = match f.contents {
                                FileContents::OnDisk(path) => messages::PackageFile::Copy { path },
                                FileContents::Generated(
                                    GeneratedFile::Manifest(path)
                                    | GeneratedFile::Lockfile(Some(path)),
                                ) => messages::PackageFile::Generate { path: Some(path) },
                                FileContents::Generated(
                                    GeneratedFile::VcsInfo(_) | GeneratedFile::Lockfile(None),
                                ) => messages::PackageFile::Generate { path: None },
                            };
                            (f.rel_path, file)
                        })),
                    };
                    let _ = ws.gctx().shell().print_json(&message);
                }
            }
        } else {
            let tarball = create_package(ws, &pkg, ar_files, local_reg.as_ref())?;
            if let Some(local_reg) = local_reg.as_mut() {
                // Skip packages with an empty `publish` list (publishing
                // disabled); they are never added to the temp registry.
                if pkg.publish() != &Some(Vec::new()) {
                    local_reg.add_package(ws, &pkg, &tarball)?;
                }
            }
            outputs.push((pkg, opts, tarball));
        }
    }

    // Verify in a second pass so that every package's local dependencies
    // already exist as tarballs in the temporary registry.
    if opts.verify {
        for (pkg, opts, tarball) in &outputs {
            verify::run_verify(ws, pkg, tarball, local_reg.as_ref(), opts)
                .context("failed to verify package tarball")?
        }
    }

    Ok(outputs)
}
326
327fn get_registry(
334 gctx: &GlobalContext,
335 pkgs: &[&Package],
336 reg_or_index: Option<RegistryOrIndex>,
337) -> CargoResult<SourceId> {
338 let reg_or_index = match reg_or_index.clone() {
339 Some(r) => Some(r),
340 None => infer_registry(pkgs)?,
341 };
342
343 let reg = reg_or_index
345 .clone()
346 .unwrap_or_else(|| RegistryOrIndex::Registry(CRATES_IO_REGISTRY.to_owned()));
347 if let RegistryOrIndex::Registry(reg_name) = reg {
348 for pkg in pkgs {
349 if let Some(allowed) = pkg.publish().as_ref() {
350 if !allowed.is_empty() && !allowed.iter().any(|a| a == ®_name) {
354 bail!(
355 "`{}` cannot be packaged.\n\
356 The registry `{}` is not listed in the `package.publish` value in Cargo.toml.",
357 pkg.name(),
358 reg_name
359 );
360 }
361 }
362 }
363 }
364 Ok(ops::registry::get_source_id(gctx, reg_or_index.as_ref())?.replacement)
365}
366
/// A set of packages along with their graph of local path dependencies,
/// each package carrying an arbitrary payload `T`.
#[derive(Clone, Debug, Default)]
pub(crate) struct LocalDependencies<T> {
    /// All packages, keyed by id, paired with their payload.
    pub packages: HashMap<PackageId, (Package, T)>,
    /// Edges link a package to each of its local path dependencies
    /// (see `local_deps`).
    pub graph: Graph<PackageId, ()>,
}
373
374impl<T: Clone> LocalDependencies<T> {
375 pub fn sort(&self) -> Vec<(Package, T)> {
376 self.graph
377 .sort()
378 .into_iter()
379 .map(|name| self.packages[&name].clone())
380 .collect()
381 }
382
383 pub fn has_dependencies(&self) -> bool {
384 self.graph
385 .iter()
386 .any(|node| self.graph.edges(node).next().is_some())
387 }
388}
389
390fn local_deps<T>(packages: impl Iterator<Item = (Package, T)>) -> LocalDependencies<T> {
395 let packages: HashMap<PackageId, (Package, T)> = packages
396 .map(|(pkg, payload)| (pkg.package_id(), (pkg, payload)))
397 .collect();
398
399 let source_to_pkg: HashMap<_, _> = packages
404 .keys()
405 .map(|pkg_id| (pkg_id.source_id(), *pkg_id))
406 .collect();
407
408 let mut graph = Graph::new();
409 for (pkg, _payload) in packages.values() {
410 graph.add(pkg.package_id());
411 for dep in pkg.dependencies() {
412 if !dep.source_id().is_path() {
414 continue;
415 }
416
417 if dep.kind() == DepKind::Development && !dep.specified_req() {
420 continue;
421 };
422
423 if dep.source_id() == pkg.package_id().source_id() {
425 continue;
426 }
427
428 if let Some(dep_pkg) = source_to_pkg.get(&dep.source_id()) {
429 graph.link(pkg.package_id(), *dep_pkg);
430 }
431 }
432 }
433
434 LocalDependencies { packages, graph }
435}
436
/// Runs pre-archive checks for `pkg` and produces the list of files to
/// include in its tarball.
#[tracing::instrument(skip_all)]
fn prepare_archive(
    ws: &Workspace<'_>,
    pkg: &Package,
    opts: &PackageOpts<'_>,
) -> CargoResult<Vec<ArchiveFile>> {
    let gctx = ws.gctx();
    let mut src = PathSource::new(pkg.root(), pkg.package_id().source_id(), gctx);
    src.load()?;

    if opts.check_metadata {
        check_metadata(pkg, gctx)?;
    }

    // When both are set, `package.include` takes precedence over
    // `package.exclude`; warn so the user isn't surprised.
    if !pkg.manifest().exclude().is_empty() && !pkg.manifest().include().is_empty() {
        gctx.shell().warn(
            "both package.include and package.exclude are specified; \
             the exclude list will be ignored",
        )?;
    }
    let src_files = src.list_files(pkg)?;

    // Check repository state (e.g. dirty files), producing the data for the
    // generated VCS info file if applicable.
    let vcs_info = vcs::check_repo_state(pkg, &src_files, ws, &opts)?;

    build_ar_list(ws, pkg, src_files, vcs_info, opts.include_lockfile)
}
465
466#[tracing::instrument(skip_all)]
468fn build_ar_list(
469 ws: &Workspace<'_>,
470 pkg: &Package,
471 src_files: Vec<PathEntry>,
472 vcs_info: Option<vcs::VcsInfo>,
473 include_lockfile: bool,
474) -> CargoResult<Vec<ArchiveFile>> {
475 let mut result = HashMap::new();
476 let root = pkg.root();
477 for src_file in &src_files {
478 let rel_path = src_file.strip_prefix(&root)?;
479 check_filename(rel_path, &mut ws.gctx().shell())?;
480 let rel_str = rel_path.to_str().ok_or_else(|| {
481 anyhow::format_err!("non-utf8 path in source directory: {}", rel_path.display())
482 })?;
483 match rel_str {
484 "Cargo.lock" => continue,
485 VCS_INFO_FILE | ORIGINAL_MANIFEST_FILE => anyhow::bail!(
486 "invalid inclusion of reserved file name {} in package source",
487 rel_str
488 ),
489 _ => {
490 result
491 .entry(UncasedAscii::new(rel_str))
492 .or_insert_with(Vec::new)
493 .push(ArchiveFile {
494 rel_path: rel_path.to_owned(),
495 rel_str: rel_str.to_owned(),
496 contents: FileContents::OnDisk(src_file.to_path_buf()),
497 });
498 }
499 }
500 }
501
502 if result.remove(&UncasedAscii::new("Cargo.toml")).is_some() {
505 result
506 .entry(UncasedAscii::new(ORIGINAL_MANIFEST_FILE))
507 .or_insert_with(Vec::new)
508 .push(ArchiveFile {
509 rel_path: PathBuf::from(ORIGINAL_MANIFEST_FILE),
510 rel_str: ORIGINAL_MANIFEST_FILE.to_string(),
511 contents: FileContents::OnDisk(pkg.manifest_path().to_owned()),
512 });
513 result
514 .entry(UncasedAscii::new("Cargo.toml"))
515 .or_insert_with(Vec::new)
516 .push(ArchiveFile {
517 rel_path: PathBuf::from("Cargo.toml"),
518 rel_str: "Cargo.toml".to_string(),
519 contents: FileContents::Generated(GeneratedFile::Manifest(
520 pkg.manifest_path().to_owned(),
521 )),
522 });
523 } else {
524 ws.gctx().shell().warn(&format!(
525 "no `Cargo.toml` file found when packaging `{}` (note the case of the file name).",
526 pkg.name()
527 ))?;
528 }
529
530 if include_lockfile {
531 let lockfile_path = ws.lock_root().as_path_unlocked().join(LOCKFILE_NAME);
532 let lockfile_path = lockfile_path.exists().then_some(lockfile_path);
533 let rel_str = "Cargo.lock";
534 result
535 .entry(UncasedAscii::new(rel_str))
536 .or_insert_with(Vec::new)
537 .push(ArchiveFile {
538 rel_path: PathBuf::from(rel_str),
539 rel_str: rel_str.to_string(),
540 contents: FileContents::Generated(GeneratedFile::Lockfile(lockfile_path)),
541 });
542 }
543
544 if let Some(vcs_info) = vcs_info {
545 let rel_str = VCS_INFO_FILE;
546 result
547 .entry(UncasedAscii::new(rel_str))
548 .or_insert_with(Vec::new)
549 .push(ArchiveFile {
550 rel_path: PathBuf::from(rel_str),
551 rel_str: rel_str.to_string(),
552 contents: FileContents::Generated(GeneratedFile::VcsInfo(vcs_info)),
553 });
554 }
555
556 let mut invalid_manifest_field: Vec<String> = vec![];
557
558 let mut result = result.into_values().flatten().collect();
559 if let Some(license_file) = &pkg.manifest().metadata().license_file {
560 let license_path = Path::new(license_file);
561 let abs_file_path = paths::normalize_path(&pkg.root().join(license_path));
562 if abs_file_path.is_file() {
563 check_for_file_and_add(
564 "license-file",
565 license_path,
566 abs_file_path,
567 pkg,
568 &mut result,
569 ws,
570 )?;
571 } else {
572 error_on_nonexistent_file(
573 &pkg,
574 &license_path,
575 "license-file",
576 &mut invalid_manifest_field,
577 );
578 }
579 }
580 if let Some(readme) = &pkg.manifest().metadata().readme {
581 let readme_path = Path::new(readme);
582 let abs_file_path = paths::normalize_path(&pkg.root().join(readme_path));
583 if abs_file_path.is_file() {
584 check_for_file_and_add("readme", readme_path, abs_file_path, pkg, &mut result, ws)?;
585 } else {
586 error_on_nonexistent_file(&pkg, &readme_path, "readme", &mut invalid_manifest_field);
587 }
588 }
589
590 if !invalid_manifest_field.is_empty() {
591 return Err(anyhow::anyhow!(invalid_manifest_field.join("\n")));
592 }
593
594 for t in pkg
595 .manifest()
596 .targets()
597 .iter()
598 .filter(|t| t.is_custom_build())
599 {
600 if let Some(custome_build_path) = t.src_path().path() {
601 let abs_custome_build_path =
602 paths::normalize_path(&pkg.root().join(custome_build_path));
603 if !abs_custome_build_path.is_file() || !abs_custome_build_path.starts_with(pkg.root())
604 {
605 error_custom_build_file_not_in_package(pkg, &abs_custome_build_path, t)?;
606 }
607 }
608 }
609
610 result.sort_unstable_by(|a, b| a.rel_path.cmp(&b.rel_path));
611
612 Ok(result)
613}
614
/// Ensures the file referenced by a manifest field (`license-file` or
/// `readme`, named by `label`) ends up in the archive.
fn check_for_file_and_add(
    label: &str,
    file_path: &Path,
    abs_file_path: PathBuf,
    pkg: &Package,
    result: &mut Vec<ArchiveFile>,
    ws: &Workspace<'_>,
) -> CargoResult<()> {
    match abs_file_path.strip_prefix(&pkg.root()) {
        // The file lives inside the package: add it at its real relative
        // path unless it is already listed.
        Ok(rel_file_path) => {
            if !result.iter().any(|ar| ar.rel_path == rel_file_path) {
                result.push(ArchiveFile {
                    rel_path: rel_file_path.to_path_buf(),
                    rel_str: rel_file_path
                        .to_str()
                        .expect("everything was utf8")
                        .to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
        // The file lives outside the package: copy it into the archive root
        // under its bare file name, unless that name is already taken.
        Err(_) => {
            let file_name = file_path.file_name().unwrap();
            if result.iter().any(|ar| ar.rel_path == file_name) {
                ws.gctx().shell().warn(&format!(
                    "{} `{}` appears to be a path outside of the package, \
                    but there is already a file named `{}` in the root of the package. \
                    The archived crate will contain the copy in the root of the package. \
                    Update the {} to point to the path relative \
                    to the root of the package to remove this warning.",
                    label,
                    file_path.display(),
                    file_name.to_str().unwrap(),
                    label,
                ))?;
            } else {
                result.push(ArchiveFile {
                    rel_path: PathBuf::from(file_name),
                    rel_str: file_name.to_str().unwrap().to_string(),
                    contents: FileContents::OnDisk(abs_file_path),
                })
            }
        }
    }
    Ok(())
}
662
663fn error_on_nonexistent_file(
664 pkg: &Package,
665 path: &Path,
666 manifest_key_name: &'static str,
667 invalid: &mut Vec<String>,
668) {
669 let rel_msg = if path.is_absolute() {
670 "".to_string()
671 } else {
672 format!(" (relative to `{}`)", pkg.root().display())
673 };
674
675 let msg = format!(
676 "{manifest_key_name} `{}` does not appear to exist{}.\n\
677 Please update the {manifest_key_name} setting in the manifest at `{}`.",
678 path.display(),
679 rel_msg,
680 pkg.manifest_path().display()
681 );
682
683 invalid.push(msg);
684}
685
686fn error_custom_build_file_not_in_package(
687 pkg: &Package,
688 path: &Path,
689 target: &Target,
690) -> CargoResult<Vec<ArchiveFile>> {
691 let tip = {
692 let description_name = target.description_named();
693 if path.is_file() {
694 format!("the source file of {description_name} doesn't appear to be a path inside of the package.\n\
695 It is at `{}`, whereas the root the package is `{}`.\n",
696 path.display(), pkg.root().display()
697 )
698 } else {
699 format!("the source file of {description_name} doesn't appear to exist.\n",)
700 }
701 };
702 let msg = format!(
703 "{}\
704 This may cause issue during packaging, as modules resolution and resources included via macros are often relative to the path of source files.\n\
705 Please update the `build` setting in the manifest at `{}` and point to a path inside the root of the package.",
706 tip, pkg.manifest_path().display()
707 );
708 anyhow::bail!(msg)
709}
710
/// Produces the `Cargo.lock` contents to embed in `publish_pkg`'s tarball.
///
/// Resolves an ephemeral workspace for the publish-ready package, reusing
/// the original workspace lockfile where possible, and renders the result
/// to a string.
fn build_lock(
    ws: &Workspace<'_>,
    publish_pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
) -> CargoResult<String> {
    let gctx = ws.gctx();
    let orig_resolve = ops::load_pkg_lockfile(ws)?;

    let mut tmp_ws = Workspace::ephemeral(publish_pkg.clone(), ws.gctx(), None, true)?;

    // Overlay the temporary registry so that local path dependencies (which
    // were packaged into it earlier) can be resolved.
    if let Some(local_reg) = local_reg {
        tmp_ws.add_local_overlay(
            local_reg.upstream,
            local_reg.root.as_path_unlocked().to_owned(),
        );
    }
    let mut tmp_reg = tmp_ws.package_registry()?;

    let mut new_resolve = ops::resolve_with_previous(
        &mut tmp_reg,
        &tmp_ws,
        &CliFeatures::new_all(true),
        HasDevUnits::Yes,
        orig_resolve.as_ref(),
        None,
        &[],
        true,
    )?;

    let pkg_set = ops::get_resolved_packages(&new_resolve, tmp_reg)?;

    // Report differences from the original lockfile (verbose only) and warn
    // about yanked dependencies before serializing.
    if let Some(orig_resolve) = orig_resolve {
        compare_resolve(gctx, tmp_ws.current()?, &orig_resolve, &new_resolve)?;
    }
    check_yanked(
        gctx,
        &pkg_set,
        &new_resolve,
        "consider updating to a version that is not yanked",
    )?;

    ops::resolve_to_string(&tmp_ws, &mut new_resolve)
}
758
/// Warns if recommended `[package]` metadata is missing: description,
/// license (or license-file), and documentation/homepage/repository.
fn check_metadata(pkg: &Package, gctx: &GlobalContext) -> CargoResult<()> {
    let md = pkg.manifest().metadata();

    let mut missing = vec![];

    // Each comma-separated group records its fields as missing only when
    // *every* `||`-joined alternative is absent or empty. Field names are
    // reported with `-` instead of `_`.
    macro_rules! lacking {
        ($( $($field: ident)||* ),*) => {{
            $(
                if $(md.$field.as_ref().map_or(true, |s| s.is_empty()))&&* {
                    $(missing.push(stringify!($field).replace("_", "-"));)*
                }
            )*
        }}
    }
    lacking!(
        description,
        license || license_file,
        documentation || homepage || repository
    );

    if !missing.is_empty() {
        // Join the names as "a, b or c".
        let mut things = missing[..missing.len() - 1].join(", ");
        if !things.is_empty() {
            things.push_str(" or ");
        }
        things.push_str(missing.last().unwrap());

        gctx.shell().warn(&format!(
            "manifest has no {things}.\n\
            See https://doc.rust-lang.org/cargo/reference/manifest.html#package-metadata for more info.",
            things = things
        ))?
    }

    Ok(())
}
799
/// Streams `ar_files` into a gzip-compressed tar archive written to `dst`,
/// returning the total uncompressed size in bytes.
fn tar(
    ws: &Workspace<'_>,
    pkg: &Package,
    local_reg: Option<&TmpRegistry<'_>>,
    ar_files: Vec<ArchiveFile>,
    dst: &File,
    filename: &str,
) -> CargoResult<u64> {
    // Record the tarball's own name in the gzip header.
    let filename = Path::new(filename);
    let encoder = GzBuilder::new()
        .filename(paths::path2bytes(filename)?)
        .write(dst, Compression::best());

    let mut ar = Builder::new(encoder);
    ar.sparse(false);
    let gctx = ws.gctx();

    // All entries are placed under a `<name>-<version>/` prefix.
    let base_name = format!("{}-{}", pkg.name(), pkg.version());
    let base_path = Path::new(&base_name);
    let included = ar_files
        .iter()
        .map(|ar_file| ar_file.rel_path.clone())
        .collect::<Vec<_>>();
    let publish_pkg = prepare_for_publish(pkg, ws, Some(&included))?;

    let mut uncompressed_size = 0;
    for ar_file in ar_files {
        let ArchiveFile {
            rel_path,
            rel_str,
            contents,
        } = ar_file;
        let ar_path = base_path.join(&rel_path);
        gctx.shell()
            .verbose(|shell| shell.status("Archiving", &rel_str))?;
        let mut header = Header::new_gnu();
        match contents {
            FileContents::OnDisk(disk_path) => {
                let mut file = File::open(&disk_path).with_context(|| {
                    format!("failed to open for archiving: `{}`", disk_path.display())
                })?;
                let metadata = file.metadata().with_context(|| {
                    format!("could not learn metadata for: `{}`", disk_path.display())
                })?;
                // Deterministic mode normalizes volatile metadata for
                // reproducible archives.
                header.set_metadata_in_mode(&metadata, HeaderMode::Deterministic);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, &mut file)
                    .with_context(|| {
                        format!("could not archive source file `{}`", disk_path.display())
                    })?;
                uncompressed_size += metadata.len() as u64;
            }
            FileContents::Generated(generated_kind) => {
                let contents = match generated_kind {
                    GeneratedFile::Manifest(_) => {
                        publish_pkg.manifest().to_normalized_contents()?
                    }
                    GeneratedFile::Lockfile(_) => build_lock(ws, &publish_pkg, local_reg)?,
                    GeneratedFile::VcsInfo(ref s) => serde_json::to_string_pretty(s)?,
                };
                header.set_entry_type(EntryType::file());
                header.set_mode(0o644);
                header.set_size(contents.len() as u64);
                // Fixed mtime keeps archives reproducible; presumably 1 is
                // used rather than 0 because some tools treat 0 specially —
                // NOTE(review): confirm.
                header.set_mtime(1);
                header.set_cksum();
                ar.append_data(&mut header, &ar_path, contents.as_bytes())
                    .with_context(|| format!("could not archive source file `{}`", rel_str))?;
                uncompressed_size += contents.len() as u64;
            }
        }
    }

    let encoder = ar.into_inner()?;
    encoder.finish()?;
    Ok(uncompressed_size)
}
882
/// In verbose mode only: notes each package that the newly generated
/// lockfile adds relative to the original one, with a hint about the
/// version(s) or source(s) it replaced.
fn compare_resolve(
    gctx: &GlobalContext,
    current_pkg: &Package,
    orig_resolve: &Resolve,
    new_resolve: &Resolve,
) -> CargoResult<()> {
    if gctx.shell().verbosity() != Verbosity::Verbose {
        return Ok(());
    }
    let new_set: BTreeSet<PackageId> = new_resolve.iter().collect();
    let orig_set: BTreeSet<PackageId> = orig_resolve.iter().collect();
    let added = new_set.difference(&orig_set);
    let removed: Vec<&PackageId> = orig_set.difference(&new_set).collect();
    for pkg_id in added {
        // The package being packaged always differs (its id changes when
        // packaged); skip it.
        if pkg_id.name() == current_pkg.name() && pkg_id.version() == current_pkg.version() {
            continue;
        }
        // Entries removed with the same name and version — i.e. only the
        // source changed.
        let removed_candidates: Vec<&PackageId> = removed
            .iter()
            .filter(|orig_pkg_id| {
                orig_pkg_id.name() == pkg_id.name() && orig_pkg_id.version() == pkg_id.version()
            })
            .cloned()
            .collect();
        let extra = match removed_candidates.len() {
            0 => {
                // No same-version removal: report which version(s) of the
                // same name went away, if any.
                let previous_versions: Vec<&PackageId> = removed
                    .iter()
                    .filter(|orig_pkg_id| orig_pkg_id.name() == pkg_id.name())
                    .cloned()
                    .collect();
                match previous_versions.len() {
                    0 => String::new(),
                    1 => format!(
                        ", previous version was `{}`",
                        previous_versions[0].version()
                    ),
                    _ => format!(
                        ", previous versions were: {}",
                        previous_versions
                            .iter()
                            .map(|pkg_id| format!("`{}`", pkg_id.version()))
                            .collect::<Vec<_>>()
                            .join(", ")
                    ),
                }
            }
            1 => {
                format!(
                    ", was originally sourced from `{}`",
                    removed_candidates[0].source_id()
                )
            }
            _ => {
                // Several sources previously provided this name/version.
                let comma_list = removed_candidates
                    .iter()
                    .map(|pkg_id| format!("`{}`", pkg_id.source_id()))
                    .collect::<Vec<_>>()
                    .join(", ");
                format!(
                    ", was originally sourced from one of these sources: {}",
                    comma_list
                )
            }
        };
        let msg = format!(
            "package `{}` added to the packaged Cargo.lock file{}",
            pkg_id, extra
        );
        gctx.shell().note(msg)?;
    }
    Ok(())
}
969
/// Warns about every package in `resolve` that is yanked in its registry,
/// appending `hint` to each warning.
pub fn check_yanked(
    gctx: &GlobalContext,
    pkg_set: &PackageSet<'_>,
    resolve: &Resolve,
    hint: &str,
) -> CargoResult<()> {
    // Querying registry sources requires the package-cache lock.
    let _lock = gctx.acquire_package_cache_lock(CacheLockMode::DownloadExclusive)?;

    let mut sources = pkg_set.sources_mut();
    let mut pending: Vec<PackageId> = resolve.iter().collect();
    let mut results = Vec::new();
    for (_id, source) in sources.sources_mut() {
        source.invalidate_cache();
    }
    // `is_yanked` is non-blocking (returns `Poll`): poll every pending
    // package, then block each source until ready, and repeat until all
    // packages have produced a result.
    while !pending.is_empty() {
        pending.retain(|pkg_id| {
            if let Some(source) = sources.get_mut(pkg_id.source_id()) {
                match source.is_yanked(*pkg_id) {
                    Poll::Ready(result) => results.push((*pkg_id, result)),
                    Poll::Pending => return true,
                }
            }
            false
        });
        for (_id, source) in sources.sources_mut() {
            source.block_until_ready()?;
        }
    }

    for (pkg_id, is_yanked) in results {
        if is_yanked? {
            gctx.shell().warn(format!(
                "package `{}` in Cargo.lock is yanked in registry `{}`, {}",
                pkg_id,
                pkg_id.source_id().display_registry_name(),
                hint
            ))?;
        }
    }
    Ok(())
}
1013
1014fn check_filename(file: &Path, shell: &mut Shell) -> CargoResult<()> {
1021 let Some(name) = file.file_name() else {
1022 return Ok(());
1023 };
1024 let Some(name) = name.to_str() else {
1025 anyhow::bail!(
1026 "path does not have a unicode filename which may not unpack \
1027 on all platforms: {}",
1028 file.display()
1029 )
1030 };
1031 let bad_chars = ['/', '\\', '<', '>', ':', '"', '|', '?', '*'];
1032 if let Some(c) = bad_chars.iter().find(|c| name.contains(**c)) {
1033 anyhow::bail!(
1034 "cannot package a filename with a special character `{}`: {}",
1035 c,
1036 file.display()
1037 )
1038 }
1039 if restricted_names::is_windows_reserved_path(file) {
1040 shell.warn(format!(
1041 "file {} is a reserved Windows filename, \
1042 it will not work on Windows platforms",
1043 file.display()
1044 ))?;
1045 }
1046 Ok(())
1047}
1048
/// A temporary local registry used as an overlay so that not-yet-published
/// local packages can be resolved against.
struct TmpRegistry<'a> {
    gctx: &'a GlobalContext,
    /// The real registry this temporary one overlays.
    upstream: SourceId,
    /// Root directory holding the index and copied tarballs.
    root: Filesystem,
    /// Held for the registry's lifetime to exclude concurrent use.
    _lock: FileLock,
}
1058
1059impl<'a> TmpRegistry<'a> {
1060 fn new(gctx: &'a GlobalContext, root: Filesystem, upstream: SourceId) -> CargoResult<Self> {
1061 root.create_dir()?;
1062 let _lock = root.open_rw_exclusive_create(".cargo-lock", gctx, "temporary registry")?;
1063 let slf = Self {
1064 gctx,
1065 root,
1066 upstream,
1067 _lock,
1068 };
1069 let index_path = slf.index_path().into_path_unlocked();
1071 if index_path.exists() {
1072 paths::remove_dir_all(index_path)?;
1073 }
1074 slf.index_path().create_dir()?;
1075 Ok(slf)
1076 }
1077
1078 fn index_path(&self) -> Filesystem {
1079 self.root.join("index")
1080 }
1081
1082 fn add_package(
1083 &mut self,
1084 ws: &Workspace<'_>,
1085 package: &Package,
1086 tar: &FileLock,
1087 ) -> CargoResult<()> {
1088 debug!(
1089 "adding package {}@{} to local overlay at {}",
1090 package.name(),
1091 package.version(),
1092 self.root.as_path_unlocked().display()
1093 );
1094 {
1095 let mut tar_copy = self.root.open_rw_exclusive_create(
1096 package.package_id().tarball_name(),
1097 self.gctx,
1098 "temporary package registry",
1099 )?;
1100 tar.file().seek(SeekFrom::Start(0))?;
1101 std::io::copy(&mut tar.file(), &mut tar_copy)?;
1102 tar_copy.flush()?;
1103 }
1104
1105 let new_crate = super::registry::prepare_transmit(self.gctx, ws, package, self.upstream)?;
1106
1107 tar.file().seek(SeekFrom::Start(0))?;
1108 let cksum = cargo_util::Sha256::new()
1109 .update_file(tar.file())?
1110 .finish_hex();
1111
1112 let deps: Vec<_> = new_crate
1113 .deps
1114 .into_iter()
1115 .map(|dep| {
1116 let name = dep
1117 .explicit_name_in_toml
1118 .clone()
1119 .unwrap_or_else(|| dep.name.clone())
1120 .into();
1121 let package = dep
1122 .explicit_name_in_toml
1123 .as_ref()
1124 .map(|_| dep.name.clone().into());
1125 RegistryDependency {
1126 name: name,
1127 req: dep.version_req.into(),
1128 features: dep.features.into_iter().map(|x| x.into()).collect(),
1129 optional: dep.optional,
1130 default_features: dep.default_features,
1131 target: dep.target.map(|x| x.into()),
1132 kind: Some(dep.kind.into()),
1133 registry: dep.registry.map(|x| x.into()),
1134 package: package,
1135 public: None,
1136 artifact: dep
1137 .artifact
1138 .map(|xs| xs.into_iter().map(|x| x.into()).collect()),
1139 bindep_target: dep.bindep_target.map(|x| x.into()),
1140 lib: dep.lib,
1141 }
1142 })
1143 .collect();
1144
1145 let index_line = serde_json::to_string(&IndexPackage {
1146 name: new_crate.name.into(),
1147 vers: package.version().clone(),
1148 deps,
1149 features: new_crate
1150 .features
1151 .into_iter()
1152 .map(|(k, v)| (k.into(), v.into_iter().map(|x| x.into()).collect()))
1153 .collect(),
1154 features2: None,
1155 cksum,
1156 yanked: None,
1157 links: new_crate.links.map(|x| x.into()),
1158 rust_version: None,
1159 v: Some(2),
1160 })?;
1161
1162 let file =
1163 cargo_util::registry::make_dep_path(&package.name().as_str().to_lowercase(), false);
1164 let mut dst = self.index_path().open_rw_exclusive_create(
1165 file,
1166 self.gctx,
1167 "temporary package registry",
1168 )?;
1169 dst.write_all(index_line.as_bytes())?;
1170 Ok(())
1171 }
1172}