#![allow(unused_labels)]

use std::path::{Path, PathBuf};
use std::collections::{BTreeMap, HashMap};
use std::io::prelude::*;
use std::borrow::Cow;
use std::time::*;

use serde::{Serialize, Deserialize};
use clap::Parser;
use tracing::{debug, error, info, trace, warn};
use tracing_subscriber::filter::EnvFilter;
use url::Url;
use anyhow::{anyhow, bail, Error, Context};
use semver::Version;
use futures::stream::StreamExt;
use tokio::io::AsyncBufReadExt;
use reqwest::header::AUTHORIZATION;

#[derive(Parser, Debug)]
#[clap(author, version, global_setting(clap::AppSettings::DeriveDisplayOrder))]
struct Opt {
    /// Config file with source directories and destination registry info
    #[clap(short, long, value_name = "PATH")]
    pub config_file: PathBuf,

    /// Perform all the work of generating `cargo publish` payloads,
    /// but don't send them to the destination registry server
    #[clap(long)]
    pub dry_run: bool,

    /// Load config file, validate the settings, and display the final loaded content
    /// to stdout, then exit
    #[clap(long)]
    pub validate: bool,

    /// Use to limit which crates from the source registry are published to the
    /// destination registry. Expects a regular expression which will be matched
    /// against the names of crates. Only crates with names that match the regex
    /// will be published. This field may also be specified at the top level of
    /// the config file.
    #[clap(long, value_name = "REGEX", alias = "filter")]
    pub filter_crates: Option<String>,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct DestinationRegistryConfig {
    #[serde(alias = "api")]
    pub api_url: Url,
    #[serde(alias = "token")]
    pub auth_token: String,
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct SourceRegistryConfig {
    #[serde(alias = "index")]
    pub index_dir: PathBuf,
    #[serde(alias = "crate-files")]
    pub crate_files_dir: PathBuf,
}

#[derive(Deserialize, Debug, Clone)]
#[serde(rename_all = "kebab-case")]
pub struct HttpConfig {
    /// Value of user-agent HTTP header
    #[serde(default = "default_user_agent")]
    pub user_agent: String,
}

const DEFAULT_USER_AGENT: &str = concat!("shipyard.rs-publish-tool/v", env!("CARGO_PKG_VERSION"));

fn default_user_agent() -> String {
    DEFAULT_USER_AGENT.to_string()
}

impl Default for HttpConfig {
    fn default() -> Self {
        Self {
            user_agent: default_user_agent(),
        }
    }
}

#[derive(Debug, Clone, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Config {
    /// Do everything except actually publish to the destination registry. Can also be
    /// toggled using the --dry-run command line flag.
    #[serde(default)]
    pub dry_run: bool,

    /// Local directories with source registry files
    #[serde(alias = "source")]
    pub src: SourceRegistryConfig,

    /// Server information and authentication needed to publish to the
    /// destination registry
    #[serde(alias = "destination")]
    pub dst: DestinationRegistryConfig,

    /// Settings controlling the HTTP publish requests to the destination registry
    #[serde(default)]
    pub http: HttpConfig,

    /// Use to limit which crates from the source registry are published to the
    /// destination registry. Expects a regular expression which will be matched
    /// against the names of crates. Only crates with names that match the regex
    /// will be published.
    #[serde(default, alias = "filter")]
    pub filter_crates: Option<String>,
}
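
// Illustrative config file for the struct above; every value here is a
// placeholder, not taken from a real deployment:
//
//     dry-run = false
//     filter-crates = "^tokio-"
//
//     [source]
//     index-dir = "/path/to/source-registry/index"
//     crate-files-dir = "/path/to/source-registry/crate-files"
//
//     [destination]
//     api-url = "https://registry.example.com"
//     token = "<publish token for the destination registry>"
//
//     [http]
//     user-agent = "my-custom-user-agent/1.0"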

impl Config {
    pub fn compile_filter(&self) -> Result<Option<regex::Regex>, Error> {
        match self.filter_crates.as_ref() {
            Some(regex) => {
                let compiled = regex::Regex::new(regex).map_err(|e| {
                    error!(%regex, err = ?e, "regex failed to compile: {}", e);
                    e
                })?;
                Ok(Some(compiled))
            }
            None => Ok(None),
        }
    }
}

/// fields we need from Cargo.toml [package] section to combine with IndexMeta
#[derive(Debug, Clone, Deserialize)]
pub struct PackageStub {
    // ... (name, version, authors, and other [package] fields)
    pub repository: Option<String>,
    pub homepage: Option<String>,
    pub documentation: Option<String>,
    pub links: Option<String>,
}

/// for parsing Cargo.toml to extract missing PublishMeta fields that do not appear
/// in the index metadata
#[derive(Debug, Clone, Deserialize)]
pub struct ManifestStub {
    pub package: PackageStub,
}

/// full definition of cargo publish json
#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct PublishMeta {
    pub name: String,
    #[serde(alias = "version")]
    pub vers: semver::Version,
    #[serde(alias = "dependencies")]
    #[serde(default)]
    pub deps: Vec<PublishDependency>,
    #[serde(default)]
    pub features: BTreeMap<String, Vec<String>>,
    #[serde(default)]
    pub authors: Vec<String>,
    pub description: Option<String>,
    pub documentation: Option<String>,
    pub homepage: Option<String>,
    pub readme: Option<String>,
    pub readme_file: Option<PathBuf>,
    #[serde(default)]
    pub keywords: Vec<String>,
    #[serde(default)]
    pub categories: Vec<String>,
    pub license: Option<String>,
    pub license_file: Option<PathBuf>,
    pub repository: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub links: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub badges: Option<BTreeMap<String, String>>,
    /// from ancient cargo versions
    #[serde(skip_serializing_if = "Option::is_none")]
    pub features2: Option<BTreeMap<String, Vec<String>>>,
    /// from ancient cargo versions
    #[serde(skip_serializing_if = "Option::is_none")]
    pub v: Option<u8>,
}

#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct PublishDependency {
    pub optional: bool,
    pub default_features: bool,
    pub name: String,
    pub features: Vec<String>,
    // cargo and crates-io have this as string
    #[serde(alias = "req")]
    pub version_req: semver::VersionReq,
    pub target: Option<String>,
    // crates-io has this as option
    pub kind: DependencyKind,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub registry: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub explicit_name_in_toml: Option<String>,
}
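
// An index entry and a publish payload carry the same dependency data under
// different names (`req` vs `version_req`, `package` vs `explicit_name_in_toml`),
// so converting an IndexDependency into a PublishDependency is a field-by-field move.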
impl From<IndexDependency> for PublishDependency {
    fn from(dep: IndexDependency) -> Self {
        Self {
            name: dep.name,
            features: dep.features,
            default_features: dep.default_features,
            optional: dep.optional,
            target: dep.target,
            kind: dep.kind,
            registry: dep.registry,
            version_req: dep.req,
            explicit_name_in_toml: dep.package,
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct IndexMeta {
    // same everything as publish metadata
    pub name: String,
    #[serde(alias = "version")]
    pub vers: semver::Version,
    #[serde(alias = "dependencies")]
    pub deps: Vec<IndexDependency>,
    pub features: BTreeMap<String, Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub links: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub badges: Option<BTreeMap<String, String>>,

    // modified format/field names
    // ...

    // ancient fields, these were actually written
    // on sanskrit on stone tablets
    #[serde(skip_serializing_if = "Option::is_none")]
    pub features2: Option<BTreeMap<String, Vec<String>>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub v: Option<u8>,
}

#[derive(Debug, Clone, Serialize, Deserialize, Eq, PartialEq)]
pub struct IndexDependency {
    /// corresponds to `explicit_name_in_toml` field in `publish::Dependency`
    /// when a dep is renamed in Cargo.toml, otherwise same as `package`.
    pub name: String,
    /// corresponds to `name` in `publish::Dependency`
    #[serde(skip_serializing_if = "Option::is_none")]
    pub package: Option<String>,
    /// in publish meta, this field is called `version_req`, and the index
    /// format requires it to be renamed to `req`
    #[serde(alias = "version_req")]
    pub req: semver::VersionReq,
    pub features: Vec<String>,
    pub optional: bool,
    pub default_features: bool,
    pub target: Option<String>,
    pub kind: DependencyKind,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub registry: Option<String>,
}

pub enum DependencyKind {
    // ...
    Dev,
}

impl PublishMeta {
    pub fn new(
        index_meta: IndexMeta,
        manifest: ManifestStub,
        readme: Option<String>,
    ) -> Self {
        let ManifestStub { package } = manifest;
        PublishMeta {
            name: package.name,
            vers: package.version,
            deps: index_meta.deps.into_iter().map(From::from).collect(),
            features: index_meta.features,
            authors: package.authors,
            description: package.description,
            documentation: package.documentation,
            homepage: package.homepage,
            readme,
            readme_file: package.readme,
            keywords: package.keywords,
            categories: package.categories,
            license: package.license,
            license_file: package.license_file,
            repository: package.repository,
            links: package.links,
            badges: index_meta.badges,
            features2: index_meta.features2,
            v: index_meta.v,
        }
    }
}
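
// The body of a registry publish request is a simple length-prefixed framing,
// assembled by `serialize_publish_payload` below:
//
//     [json length: u32 LE][publish metadata json][crate length: u32 LE][.crate file bytes]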
fn serialize_publish_payload(
    publish_meta_json: &[u8],
    dot_crate_bytes: &[u8],
) -> Vec<u8> {
    assert!(publish_meta_json.len() <= u32::MAX as usize);
    assert!(dot_crate_bytes.len() <= u32::MAX as usize);

    let mut out: Vec<u8> = Vec::with_capacity(
        publish_meta_json.len()
        + dot_crate_bytes.len()
        + 8 // 2x u32 lengths
    );

    out.extend_from_slice(&(publish_meta_json.len() as u32).to_le_bytes()[..]);
    out.extend_from_slice(publish_meta_json);
    out.extend_from_slice(&(dot_crate_bytes.len() as u32).to_le_bytes()[..]);
    out.extend_from_slice(dot_crate_bytes);

    out
}

fn extract_manifest_from_tar<R: Read>(rdr: R) -> Result<Option<String>, Error> {
    let mut archive = tar::Archive::new(rdr);
    for entry in archive.entries()? {
        // ... (elided: locate Cargo.toml in the archive and return its contents)
    }
    // ...
}

fn extract_readme_from_tar<R: Read>(rdr: R, readme_path: &Path) -> Result<Option<String>, Error> {
    // ... (elided: locate `readme_path` in the archive and return its contents)
    Ok(None)
}
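
// Logging verbosity is controlled through the standard `RUST_LOG` environment
// variable (e.g. `RUST_LOG=debug`), since the filter below is built with
// `EnvFilter::from_default_env()`.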
fn setup_logger() {
    let env_filter = EnvFilter::from_default_env();
    let builder = tracing_subscriber::fmt()
        .with_env_filter(env_filter)
        .with_ansi(true);
    builder.init();
}

fn load_config_file(opt: &Opt) -> Result<Config, Error> {
    if !opt.config_file.exists() {
        bail!("path does not exist: {:?}", opt.config_file);
    }
    let toml = std::fs::read_to_string(&opt.config_file)?;
    let mut config: Config = toml::from_str(&toml)
        .context("read config file, but unable to parse toml - check \
            format against example config")?;
    // augment using command line opts
    config.filter_crates = config.filter_crates.or_else(|| opt.filter_crates.clone());
    config.dry_run |= opt.dry_run;
    Ok(config)
}

fn is_hidden(entry: &walkdir::DirEntry) -> bool {
    entry
        .file_name()
        .to_str()
        .map(|s| s.starts_with('.'))
        .unwrap_or(false)
}

async fn get_index_metas(
    config: &Config,
) -> Result<HashMap<String, Vec<IndexMeta>>, Error> {
    let filter = config.compile_filter()?;
    let mut n_excl = 0;
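
    // The source index is expected to follow the crates.io index layout: one
    // newline-delimited-JSON metadata file per crate, at `1/<name>` and
    // `2/<name>` for one- and two-letter crates, `3/<first-letter>/<name>` for
    // three-letter crates, and `<first-two>/<next-two>/<name>` otherwise -
    // i.e. always at depth 2 or 3 below the index root, which is what the
    // walk below targets.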
    let files: Vec<(String, PathBuf)> = walkdir::WalkDir::new(&config.src.index_dir)
        .max_depth(3)
        .into_iter()
        .filter_entry(|e| !is_hidden(e))
        .filter_map(|res| match res {
            Ok(entry) => {
                if entry.file_type().is_file() && entry.depth() >= 2 && entry.depth() <= 3 {
                    let path = entry.into_path();
                    let crate_name: &str = path.file_name().and_then(|x| x.to_str()).unwrap_or("");
                    if let Some(filter) = filter.as_ref() {
                        if !filter.is_match(crate_name.as_ref()) {
                            trace!(%crate_name, "crate excluded by filter");
                            n_excl += 1;
                            return None;
                        }
                    }

                    debug!(?path, "found crate index metadata file to parse");
                    Some((crate_name.to_owned(), path))
                } else {
                    None
                }
            }
            Err(e) => {
                warn!(error = ?e, "walkdir result is error");
                None
            }
        })
        .collect();

    let n_files = files.len();
    info!("found {} crate index metadata files to parse", n_files);

    if n_excl > 0 {
        warn!(
            regex = %config.filter_crates.as_deref().unwrap_or(""),
            n_files,
            n_excl,
            "regex filter (--filter-crates) excluded {} crates", n_excl,
        );
    }

    let crate_versions: Vec<Result<(String, Vec<IndexMeta>), Error>> =
        futures::stream::iter(files.into_iter().map(|(crate_name, path)| {
            async move {
                let file = tokio::fs::File::open(&path).await.map_err(|e| {
                    error!(err = ?e, ?path, "failed to open file");
                    e
                })?;
                let buf = tokio::io::BufReader::new(file);
                let mut out = Vec::new();
                let mut lines = buf.lines();
                'lines: while let Some(line) = lines.next_line().await? {
                    let index_meta: IndexMeta = serde_json::from_str(&line)
                        .map_err(|e| {
                            error!(err = ?e, ?path, "failed to parse line");
                            e
                        })?;
                    out.push(index_meta);
                }
                debug!(crate_name = %out.first().map(|x| x.name.as_str()).unwrap_or("na"),
                    "parsed {} crate versions from metadata file", out.len()
                );

                Ok((crate_name, out))
            }
        }))
        .buffer_unordered(num_cpus::get())
        .collect()
        .await;

    let mut total_number_of_crate_versions = 0;

    // map of crate-name => [IndexMeta] (one per published version)
    let crate_versions: HashMap<String, Vec<IndexMeta>> = crate_versions
        .into_iter()
        .filter_map(|result| match result {
            Ok((crate_name, xs)) => {
                total_number_of_crate_versions += xs.len();
                Some((crate_name, xs))
            }
            Err(e) => {
                error!(err = ?e, "parsing metadata failed, skipping file");
                None
            }
        })
        .collect();

    info!(
        n_files,
        n_excl,
        n_crates = crate_versions.len(),
        total_number_of_crate_versions,
        "parsed {} crate version metadata entries from index",
        total_number_of_crate_versions,
    );

    Ok(crate_versions)
}

async fn process_crates(
    config: &Config,
    crate_versions: HashMap<String, Vec<IndexMeta>>,
) -> Result<(), Error> {
    let http_client = reqwest::Client::builder()
        .user_agent(&config.http.user_agent)
        .build()?;
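
    // Registries implementing the standard cargo web API accept publishes as a
    // PUT to /api/v1/crates/new, authenticated via the Authorization header.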
    let publish_url = config.dst.api_url.join("/api/v1/crates/new")?;

    for (crate_name, versions) in crate_versions {
        for index_meta in versions {
            let version = index_meta.vers.clone();
            debug!(%crate_name, %version, "processing crate version");
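            // the source dump stores each .crate file at
            // <crate-files-dir>/<crate-name>/<version>/download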
            let dot_crate_path = config.src.crate_files_dir
                .join(&format!("{}/{}/download", crate_name, index_meta.vers));
            verify_file_exists(&dot_crate_path).await?;

            debug!(path = ?dot_crate_path, "reading .crate file");
            let dot_crate_bytes = tokio::fs::read(&dot_crate_path)
                .await
                .with_context(|| {
                    format!("failed to read .crate file for \
                        {crate_name} v{0} with path {dot_crate_path:?}",
                        index_meta.vers,
                    )
                })?;

            debug!("extracting Cargo.toml from .crate targz archive");
            let decoder = flate2::read::GzDecoder::new(&dot_crate_bytes[..]);
            let manifest_toml = extract_manifest_from_tar(decoder)?
                .ok_or_else(|| anyhow!("Cargo.toml not found in .crate targz archive"))?;

            let manifest: ManifestStub = toml::from_str(&manifest_toml)?;

            let mut readme: Option<String> = None;
            if let Some(readme_path) = manifest.package.readme.as_ref() {
                let decoder = flate2::read::GzDecoder::new(&dot_crate_bytes[..]);
                if let Some(readme_content) = extract_readme_from_tar(decoder, readme_path)? {
                    debug!(length = readme_content.len(), "extracted readme file content from .crate targz archive");
                    readme = Some(readme_content);
                }
            }

            let publish_meta = PublishMeta::new(index_meta, manifest, readme);
            let publish_meta_json = serde_json::to_vec(&publish_meta)?;
            let payload = serialize_publish_payload(&publish_meta_json, &dot_crate_bytes);
            debug!(
                n_bytes = payload.len(),
                %crate_name,
                %version,
                "serialized publish payload",
            );

            if config.dry_run {
                debug!(
                    %crate_name,
                    %version,
                    %publish_url,
                    "skipping publish (--dry-run mode)",
                );
                continue;
            }

            let resp = http_client.put(publish_url.clone())
                .header(AUTHORIZATION, &config.dst.auth_token)
                .body(payload)
                .send()
                .await?;

            debug!(status = ?resp.status(), "rcvd server response to publish request");

            let resp_body: serde_json::Value = resp
                .error_for_status()?
                .json()
                .await?;

            debug!("server response body:\n{resp_body:#?}");

            info!(
                %crate_name,
                %version,
                "published crate version",
            );
        }
    }

    Ok(())
}

async fn verify_dir_exists<P: AsRef<std::path::Path>>(path: P) -> Result<(), Error> {
    match tokio::fs::metadata(path.as_ref()).await {
        Ok(meta) if meta.is_dir() => Ok(()),
        Ok(meta) /* if ! meta.is_dir() */ => {
            debug_assert!(!meta.is_dir());
            bail!("path exists, but is not a directory: {:?}", path.as_ref())
        }
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            bail!("path does not exist: {}", path.as_ref().display());
        }
        Err(e) => Err(e.into()),
    }
}

async fn verify_file_exists<P: AsRef<std::path::Path>>(path: P) -> Result<(), Error> {
    match tokio::fs::metadata(path.as_ref()).await {
        Ok(meta) if meta.is_file() => Ok(()),
        Ok(meta) /* if ! meta.is_file() */ => {
            debug_assert!(!meta.is_file());
            bail!("path exists, but is not a file: {:?}", path.as_ref())
        }
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            bail!("path does not exist: {}", path.as_ref().display());
        }
        Err(e) => Err(e.into()),
    }
}

fn main() -> Result<(), Error> {
    let begin = Instant::now();

    dotenvy::dotenv().ok();

    let opt = Opt::parse();

    setup_logger();

    let config = load_config_file(&opt)?;

    let rt = tokio::runtime::Runtime::new()?;
    rt.block_on(verify_dir_exists(&config.src.index_dir))?;
    rt.block_on(verify_dir_exists(&config.src.crate_files_dir))?;

    if opt.validate {
        println!("{:#?}", config);
        return Ok(());
    }

    let krates = rt.block_on(get_index_metas(&config))?;

    rt.block_on(process_crates(&config, krates))?;

    info!("finished in {:?}", begin.elapsed());
    Ok(())
}