Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 0 additions & 12 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

1 change: 0 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,6 @@ serde_json = { version = "1.0.99" }
tar = { version = "0.4.38" }
thiserror = { version = "1.0.40" }
tracing = { version = "0.1.37" }
tracing-chrome = { version = "0.7.1" }
tracing-subscriber = { version = "0.3.17", features = ["env-filter"] }
tokio = { version = "1", features = ["rt", "macros", "rt-multi-thread"] }
uuid = { version = "1.4.0", features = ["v4", "fast-rng"] }
Expand Down
1 change: 0 additions & 1 deletion crates/cli/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,5 @@ pacquet_package_json = { workspace = true }
anyhow = { workspace = true }
clap = { workspace = true }
tracing = { workspace = true }
tracing-chrome = { workspace = true }
tracing-subscriber = { workspace = true }
tokio = { workspace = true }
2 changes: 1 addition & 1 deletion crates/cli/src/commands.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ use pacquet_package_json::DependencyGroup;
#[derive(Parser, Debug)]
#[command(name = "pacquet")]
#[command(bin_name = "pacquet")]
#[command(version = "0.0.1")]
#[command(version = "0.0.8")]
#[command(about = "Experimental package manager for node.js")]
pub struct Cli {
#[command(subcommand)]
Expand Down
4 changes: 1 addition & 3 deletions crates/cli/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,11 +9,10 @@ use commands::{Cli, Subcommands};
use pacquet_package_json::PackageJson;
use pacquet_registry::RegistryManager;

use crate::tracing::{enable_tracing_by_env, enable_tracing_by_env_with_chrome_layer};
use crate::tracing::enable_tracing_by_env;

pub async fn run_commands() -> Result<()> {
enable_tracing_by_env();
enable_tracing_by_env_with_chrome_layer();
let current_directory = env::current_dir().context("problem fetching current directory")?;
let package_json_path = current_directory.join("package.json");
let cli = Cli::parse();
Expand All @@ -29,7 +28,6 @@ pub async fn run_commands() -> Result<()> {
current_directory.join(&args.virtual_store_dir),
package_json_path,
)?;
registry_manager.prepare()?;
// TODO if a package already exists in another dependency group, we don't remove
// the existing entry.
registry_manager.add_dependency(&args.package, args.get_dependency_group()).await?;
Expand Down
25 changes: 1 addition & 24 deletions crates/cli/src/tracing.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
use std::{str::FromStr, sync::atomic::AtomicBool};

use tracing::Level;
use tracing_chrome::FlushGuard;
use tracing_subscriber::{fmt::format::FmtSpan, layer::Filter, EnvFilter, Layer};

static IS_TRACING_ENABLED: AtomicBool = AtomicBool::new(false);
Expand All @@ -18,7 +17,7 @@ impl<S> Filter<S> for FilterEvent {
!meta.is_event()
}
}
pub fn enable_tracing_by_env() -> Option<FlushGuard> {
pub fn enable_tracing_by_env() {
let trace_var = std::env::var("TRACE").ok();
let is_enable_tracing = trace_var.is_some();

Expand All @@ -32,7 +31,6 @@ pub fn enable_tracing_by_env() -> Option<FlushGuard> {
.init();
tracing::trace!("enable_tracing_by_env");
}
None
}

fn generate_common_layers(
Expand Down Expand Up @@ -60,24 +58,3 @@ fn generate_common_layers(
}
layers
}

pub fn enable_tracing_by_env_with_chrome_layer() -> Option<FlushGuard> {
let trace_var = std::env::var("TRACE").ok();
let is_enable_tracing = trace_var.is_some();
if is_enable_tracing && !IS_TRACING_ENABLED.swap(true, std::sync::atomic::Ordering::SeqCst) {
use tracing_chrome::ChromeLayerBuilder;
use tracing_subscriber::prelude::*;

let (chrome_layer, guard) = ChromeLayerBuilder::new().include_args(true).build();
let layers = generate_common_layers(trace_var);
// If we don't do this, chrome_layer will collect nothing.
// std::mem::forget(guard);
tracing_subscriber::registry()
.with(layers)
.with(chrome_layer.with_filter(FilterEvent {}))
.init();
Some(guard)
} else {
None
}
}
141 changes: 65 additions & 76 deletions crates/registry/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,24 +2,21 @@ mod error;
mod http_client;
mod package;

use std::{
collections::HashMap,
fs,
path::{Path, PathBuf},
};
use std::path::{Path, PathBuf};

use async_recursion::async_recursion;
use futures_util::future::join_all;
use pacquet_package_json::{DependencyGroup, PackageJson};
use pacquet_tarball::{download_dependency, get_package_store_folder_name};
use pacquet_tarball::{get_package_store_folder_name, TarballManager};

use crate::{error::RegistryError, http_client::HttpClient};

pub struct RegistryManager {
client: HttpClient,
client: Box<HttpClient>,
node_modules_path: PathBuf,
store_path: PathBuf,
package_json: PackageJson,
package_json: Box<PackageJson>,
tarball_manager: Box<TarballManager>,
}

impl RegistryManager {
Expand All @@ -29,19 +26,21 @@ impl RegistryManager {
package_json_path: P,
) -> Result<RegistryManager, RegistryError> {
Ok(RegistryManager {
client: HttpClient::new(),
client: Box::new(HttpClient::new()),
node_modules_path: node_modules_path.into(),
store_path: store_path.into(),
package_json: PackageJson::create_if_needed(&package_json_path.into())?,
package_json: Box::new(PackageJson::create_if_needed(&package_json_path.into())?),
tarball_manager: Box::new(TarballManager::new()),
})
}

pub fn prepare(&self) -> Result<(), RegistryError> {
// create store path.
fs::create_dir_all(&self.store_path)?;
Ok(())
}

/// Here is a brief overview of what this function does.
/// 1. Get a dependency
/// 2. Save the dependency to node_modules/.pacquet/pkg@version/node_modules/pkg
/// 3. Create a symlink to node_modules/pkg
/// 4. Download all dependencies to node_modules/.pacquet
/// 5. Symlink all dependencies to node_modules/.pacquet/pkg@version/node_modules
/// 6. Update package.json
pub async fn add_dependency(
&mut self,
name: &str,
Expand All @@ -51,39 +50,30 @@ impl RegistryManager {
let dependency_store_folder_name =
get_package_store_folder_name(name, &latest_version.version.to_string());

let mut save_path =
self.store_path.join(dependency_store_folder_name).join("node_modules").join(name);
let symlink_to = self.node_modules_path.join(name);

download_dependency(
name,
latest_version.get_tarball_url(),
save_path.as_ref(),
symlink_to.as_ref(),
)
.await?;

let mut all_dependencies: HashMap<&String, &String> = HashMap::new();

if let Some(deps) = latest_version.dependencies.as_ref() {
all_dependencies.extend(deps);
let package_node_modules_path =
self.store_path.join(dependency_store_folder_name).join("node_modules");

self.tarball_manager
.download_dependency(
name,
latest_version.get_tarball_url(),
&package_node_modules_path.join(name),
&self.node_modules_path.join(name),
)
.await?;

if let Some(dependencies) = latest_version.dependencies.as_ref() {
join_all(
dependencies
.iter()
.map(|(name, version)| {
self.add_package(name, version, &package_node_modules_path)
})
.collect::<Vec<_>>(),
)
.await;
}

// If package is under an organization such as @fastify/error
// We need to go 2 folders to find the correct node_modules folder.
// For example symlink_path should be node_modules for node_modules/@fastify/error.
if name.contains('/') {
save_path = save_path.parent().unwrap().to_path_buf();
}

join_all(
all_dependencies
.into_iter()
.map(|(name, version)| self.add_package(name, version, &save_path))
.collect::<Vec<_>>(),
)
.await;

self.package_json.add_dependency(
name,
&format!("^{0}", &latest_version.version),
Expand All @@ -105,37 +95,36 @@ impl RegistryManager {
let package_version = package.get_suitable_version_of(version)?.unwrap();
let dependency_store_folder_name =
get_package_store_folder_name(name, &package_version.version.to_string());
let save_path =
self.store_path.join(dependency_store_folder_name).join("node_modules").join(name);

download_dependency(
name,
package_version.get_tarball_url(),
save_path.as_ref(),
&symlink_path.join(&package.name),
)
.await?;

let all_dependencies: HashMap<String, String> =
package_version.dependencies.clone().unwrap_or(HashMap::<String, String>::new());

let mut symlink_path = save_path.parent().unwrap();

// If package is under an organization such as @fastify/error
// We need to go 2 folders to find the correct node_modules folder.
// For example symlink_path should be node_modules for node_modules/@fastify/error.
if name.contains('/') {
symlink_path = symlink_path.parent().unwrap();
let package_node_modules_path =
self.store_path.join(dependency_store_folder_name).join("node_modules");

// Make sure to lock the package's mutex so we don't install the same package's tarball
// in different threads.
let mutex_guard = package.mutex.lock().await;

self.tarball_manager
.download_dependency(
name,
package_version.get_tarball_url(),
&package_node_modules_path.join(name),
&symlink_path.join(&package.name),
)
.await?;

drop(mutex_guard);

if let Some(dependencies) = package_version.dependencies.as_ref() {
join_all(
dependencies
.iter()
.map(|(name, version)| {
self.add_package(name, version, &package_node_modules_path)
})
.collect::<Vec<_>>(),
)
.await;
}

join_all(
all_dependencies
.iter()
.map(|(name, version)| self.add_package(name, version, symlink_path))
.collect::<Vec<_>>(),
)
.await;

Ok(())
}
}
6 changes: 5 additions & 1 deletion crates/registry/src/package.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use std::collections::HashMap;
use std::{collections::HashMap, sync::Arc};

use serde::{Deserialize, Serialize};
use tokio::sync::Mutex;

use crate::error::RegistryError;

Expand Down Expand Up @@ -35,6 +36,9 @@ pub struct Package {
#[serde(alias = "dist-tags")]
dist_tags: HashMap<String, String>,
pub versions: HashMap<String, PackageVersion>,

#[serde(skip_serializing, skip_deserializing)]
pub mutex: Arc<Mutex<u8>>,
}

impl Package {
Expand Down
Loading