Simpler metadata discovery (#1635)
kennykerr authored Mar 25, 2022
1 parent b3ebe39 commit be54b2b
Showing 5 changed files with 26 additions and 68 deletions.
2 changes: 1 addition & 1 deletion crates/libs/metadata/src/reader/mod.rs
@@ -40,4 +40,4 @@ pub use type_kind::*;
 pub use type_name::*;
 pub use type_reader::*;
 pub use type_tree::*;
-pub use workspace::*;
+use workspace::*;
1 change: 1 addition & 0 deletions crates/libs/metadata/src/reader/type_reader.rs
@@ -10,6 +10,7 @@ pub struct TypeReader {
 }
 
 impl TypeReader {
+    // TODO: figure out how this can be non-static
     pub fn get() -> &'static Self {
        use std::{mem::MaybeUninit, sync::Once};
        static ONCE: Once = Once::new();
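Both TypeReader::get() above and workspace_winmds() in the next file rely on the same one-time-initialization pattern built from std::sync::Once and MaybeUninit. The truncated bodies follow roughly this shape; this is only a sketch, with build_value standing in for whatever expensive work the real functions do on first use:

use std::{mem::MaybeUninit, sync::Once};

// Hypothetical stand-in for the expensive work the real functions perform once.
fn build_value() -> String {
    "expensive result".to_string()
}

fn get() -> &'static String {
    static ONCE: Once = Once::new();
    static mut VALUE: MaybeUninit<String> = MaybeUninit::uninit();

    // The closure runs exactly once, even if several threads race on the first call.
    ONCE.call_once(|| unsafe { VALUE = MaybeUninit::new(build_value()) });

    // By the time call_once returns, VALUE is guaranteed to be initialized.
    unsafe { &*VALUE.as_ptr() }
}

fn main() {
    println!("{}", get());
}

Since Rust 1.70, std::sync::OnceLock offers the same one-time initialization without the static mut bookkeeping.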
61 changes: 10 additions & 51 deletions crates/libs/metadata/src/reader/workspace.rs
@@ -1,5 +1,6 @@
 use super::*;
 
+// TODO: should just pass files directly to the reader. https://github.com/microsoft/windows-rs/issues/1406
 pub fn workspace_winmds() -> &'static [File] {
     use std::{mem::MaybeUninit, sync::Once};
     static ONCE: Once = Once::new();
@@ -10,64 +11,22 @@ pub fn workspace_winmds() -> &'static [File] {
     unsafe { &*VALUE.as_ptr() }
 }
 
-fn json_value(key: &str) -> String {
-    let json = cargo_metadata();
-    let json_key = format!(r#""{}":""#, key);
-
-    let beginning_index = json.rfind(&json_key).unwrap_or_else(|| panic!("Cargo metadata did not contain `{}` key.", key)) + json_key.len();
-
-    let ending_index = json[beginning_index..].find('"').unwrap_or_else(|| panic!("Cargo metadata ended before closing `\"` in `{}` value", key));
-
-    json[beginning_index..beginning_index + ending_index].replace("\\\\", "\\")
-}
-
-#[doc(hidden)]
-pub fn workspace_dir() -> String {
-    json_value("workspace_root")
-}
-
-pub fn target_dir() -> String {
-    json_value("target_directory")
-}
-
-fn cargo_metadata() -> &'static str {
-    use std::{mem::MaybeUninit, sync::Once};
-    static ONCE: Once = Once::new();
-    static mut VALUE: MaybeUninit<String> = MaybeUninit::uninit();
-
-    ONCE.call_once(|| {
-        let output = std::process::Command::new(env!("CARGO")).arg("metadata").arg("--format-version=1").arg("--no-deps").arg("--offline").output().expect("Failed to run `cargo metadata`");
-
-        unsafe { VALUE = MaybeUninit::new(String::from_utf8(output.stdout).expect("Cargo metadata is not utf-8")) }
-    });
-
-    // This is safe because `call_once` has already been called.
-    unsafe { &*VALUE.as_ptr() }
-}
-
 fn get_workspace_winmds() -> Vec<File> {
-    fn push_dir(result: &mut Vec<File>, dir: &std::path::Path) {
-        if let Ok(files) = std::fs::read_dir(&dir) {
-            for file in files.filter_map(|file| file.ok()) {
-                if let Ok(file_type) = file.file_type() {
-                    if file_type.is_file() {
-                        let path = file.path();
-                        if path.extension().and_then(|extension| extension.to_str()) == Some("winmd") {
-                            result.push(File::new(path));
-                        }
+    let mut result = vec![];
+
+    if let Ok(files) = std::fs::read_dir(".windows/winmd") {
+        for file in files.filter_map(|file| file.ok()) {
+            if let Ok(file_type) = file.file_type() {
+                if file_type.is_file() {
+                    let path = file.path();
+                    if path.extension().and_then(|extension| extension.to_str()) == Some("winmd") {
+                        result.push(File::new(path));
                     }
                 }
             }
         }
     }
 
-    let mut result = vec![];
-
-    let mut dir: std::path::PathBuf = workspace_dir().into();
-    dir.push(".windows");
-    dir.push("winmd");
-    push_dir(&mut result, &dir);
-
     if !result.iter().any(|file| file.name.starts_with("Windows.")) {
         result.push(File::from_bytes("Windows.winmd".to_string(), include_bytes!("../../default/Windows.winmd").to_vec()));
         result.push(File::from_bytes("Windows.Win32.winmd".to_string(), include_bytes!("../../default/Windows.Win32.winmd").to_vec()));
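The deleted json_value helper located values in the cargo metadata output by plain string search rather than by parsing JSON. For anyone reading the removed code, here is a standalone sketch of the same idea, with hypothetical names and simplified error handling:

// Rough, hypothetical equivalent of the removed json_value("workspace_root") helper:
// scrape a string value out of `cargo metadata` output by searching for its key.
fn scrape(json: &str, key: &str) -> Option<String> {
    let needle = format!(r#""{}":""#, key); // e.g. "workspace_root":"
    let start = json.rfind(&needle)? + needle.len(); // just past the opening quote
    let end = json[start..].find('"')?; // closing quote of the value
    Some(json[start..start + end].replace("\\\\", "\\")) // un-escape Windows path separators
}

fn main() {
    let json = r#"{"workspace_root":"C:\\src\\windows-rs","target_directory":"C:\\src\\windows-rs\\target"}"#;
    assert_eq!(scrape(json, "workspace_root").as_deref(), Some(r"C:\src\windows-rs"));
}

With that lookup gone, workspace_winmds no longer needs the workspace root at all: it reads .windows/winmd relative to the current directory and falls back to the bundled Windows.winmd and Windows.Win32.winmd files when no Windows.* metadata is found there.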
2 changes: 1 addition & 1 deletion crates/libs/metadata/src/writer/tables/mod.rs
@@ -120,7 +120,7 @@ impl Tables {
     }
 
     // Once all of the type information has been added, normalization is the process of packing
-    // the various relational records into their respective tables and leaving only index behind.
+    // the various relational records into their respective tables and leaving only offsets behind.
     fn normalize(&mut self) {
         for type_def in &mut self.type_def {
             type_def.field_index = self.field.len();
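The comment above describes normalization as packing the relational records into their respective tables and keeping only offsets. A hypothetical, stripped-down illustration of that idea (the real writer has many more tables and its own index types):

// Each TypeDef starts out owning its Field records; normalize() drains them into the
// shared field table and keeps only the offset at which they begin.
#[derive(Default)]
struct Field {
    name: String,
}

#[derive(Default)]
struct TypeDef {
    fields: Vec<Field>,  // relational form: records owned by the type
    field_index: usize,  // normalized form: offset into Tables::field
}

#[derive(Default)]
struct Tables {
    type_def: Vec<TypeDef>,
    field: Vec<Field>,
}

impl Tables {
    fn normalize(&mut self) {
        for type_def in &mut self.type_def {
            type_def.field_index = self.field.len();
            self.field.append(&mut type_def.fields);
        }
    }
}

fn main() {
    let mut tables = Tables::default();
    tables.type_def.push(TypeDef { fields: vec![Field { name: "a".into() }, Field { name: "b".into() }], field_index: 0 });
    tables.type_def.push(TypeDef { fields: vec![Field { name: "c".into() }], field_index: 0 });
    tables.normalize();
    assert_eq!(tables.type_def[1].field_index, 2); // the second type's fields begin at offset 2
    assert_eq!(tables.field[2].name, "c");
}

This mirrors the type_def.field_index = self.field.len() line visible in the diff: after normalization the fields live in one flat table and each TypeDef only remembers where its slice starts.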
28 changes: 13 additions & 15 deletions crates/tools/yml/src/main.rs
@@ -4,7 +4,6 @@ fn main() {
}

fn test_yml() {
-    let root = std::path::PathBuf::from(metadata::reader::workspace_dir());
let mut yml = r#"name: Test
on:
@@ -68,7 +67,7 @@ jobs:
run: |"#
.to_string();

-    for name in crates(&root) {
+    for name in crates() {
if !requires_nightly(&name) {
yml.push_str(&format!("\n cargo test --target ${{{{ matrix.target }}}} -p {} &&", name));
}
@@ -84,7 +83,7 @@ jobs:
run: |"#,
);

-    for name in crates(&root) {
+    for name in crates() {
if requires_nightly(&name) {
yml.push_str(&format!("\n cargo test --target ${{{{ matrix.target }}}} -p {} &&", name));
}
@@ -120,11 +119,10 @@ jobs:
"#,
);

std::fs::write(root.join(".github/workflows/test.yml"), yml.as_bytes()).unwrap();
std::fs::write(".github/workflows/test.yml", yml.as_bytes()).unwrap();
}

fn build_yml() {
-    let root = std::path::PathBuf::from(metadata::reader::workspace_dir());
let mut yml = r#"name: Build
on:
@@ -199,39 +197,39 @@ jobs:
run: |"#
.to_string();

-    for name in crates(&root) {
+    for name in crates() {
yml.push_str(&format!("\n cargo clippy -p {} &&", name));
}

yml.truncate(yml.len() - 2);

std::fs::write(root.join(".github/workflows/build.yml"), yml.as_bytes()).unwrap();
std::fs::write(".github/workflows/build.yml", yml.as_bytes()).unwrap();
}

-fn crates(root: &std::path::Path) -> Vec<String> {
+fn crates() -> Vec<String> {
let mut crates = vec![];

for dir in dirs(root, "crates/libs") {
for dir in dirs("crates/libs") {
if dir == "windows" {
crates.push("windows".to_string());
} else {
crates.push(format!("windows-{}", dir));
}
}

for dir in dirs(root, "crates/samples") {
for dir in dirs("crates/samples") {
crates.push(format!("sample_{}", dir));
}

for dir in dirs(root, "crates/targets") {
for dir in dirs("crates/targets") {
crates.push(format!("windows_{}", dir));
}

for dir in dirs(root, "crates/tests") {
for dir in dirs("crates/tests") {
crates.push(format!("test_{}", dir));
}

for dir in dirs(root, "crates/tools") {
for dir in dirs("crates/tools") {
crates.push(format!("tool_{}", dir));
}

@@ -242,10 +240,10 @@ fn requires_nightly(name: &str) -> bool {
name.contains("implement") || name.contains("nightly") || name.starts_with("sample")
}

-fn dirs(root: &std::path::Path, path: &str) -> Vec<String> {
+fn dirs(path: &str) -> Vec<String> {
let mut dirs = vec![];

-    if let Ok(files) = std::fs::read_dir(root.join(path)) {
+    if let Ok(files) = std::fs::read_dir(path) {
for file in files.filter_map(|file| file.ok()) {
if let Ok(file_type) = file.file_type() {
if file_type.is_dir() {
