diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..8754eab
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,70 @@
+name: CI
+
+on:
+  push:
+    branches: [main]
+  pull_request:
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.event.pull_request.number || github.sha }}
+  cancel-in-progress: true
+
+env:
+  CARGO_INCREMENTAL: 0
+  CARGO_NET_RETRY: 10
+  CARGO_TERM_COLOR: always
+  RUSTUP_MAX_RETRIES: 10
+  PACKAGE_NAME: ruff-tools
+  PYTHON_VERSION: 3.12
+
+jobs:
+  cargo-fmt:
+    name: cargo fmt
+    runs-on: ubuntu-latest
+    timeout-minutes: 2
+    steps:
+      - uses: actions/checkout@v4
+      - run: rustup component add rustfmt
+      - run: cargo fmt --all --check
+
+  cargo-clippy:
+    name: cargo clippy
+    runs-on: ubuntu-latest
+    timeout-minutes: 2
+    steps:
+      - uses: actions/checkout@v4
+      - run: rustup component add clippy
+      - run: cargo clippy --workspace --all-targets --all-features --locked -- -D warnings
+
+  cargo-test-linux:
+    name: cargo test (linux)
+    runs-on: ubuntu-latest
+    timeout-minutes: 5
+    steps:
+      - uses: actions/checkout@v4
+      - run: rustup show
+      - run: cargo test
+      - run: cargo doc --all --no-deps --document-private-items
+        env:
+          # `cargo doc --check` isn't yet implemented (rust-lang/cargo#10025).
+          RUSTDOCFLAGS: "-D warnings"
+
+  cargo-build-release:
+    name: cargo build (release)
+    runs-on: macos-latest
+    if: github.ref == 'refs/heads/main'
+    timeout-minutes: 5
+    steps:
+      - uses: actions/checkout@v4
+      - run: rustup show
+      - run: cargo build --release --locked
+
+  cargo-shear:
+    name: cargo shear
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: cargo-bins/cargo-binstall@main
+      - run: cargo binstall --no-confirm cargo-shear
+      - run: cargo shear
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
new file mode 100644
index 0000000..1dffbcb
--- /dev/null
+++ b/.github/workflows/release.yml
@@ -0,0 +1,291 @@
+# This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/
+#
+# Copyright 2022-2024, axodotdev
+# SPDX-License-Identifier: MIT or Apache-2.0
+#
+# CI that:
+#
+# * checks for a Git Tag that looks like a release
+# * builds artifacts with dist (archives, installers, hashes)
+# * uploads those artifacts to a temporary workflow zip
+# * on success, uploads the artifacts to a GitHub Release
+#
+# Note that the GitHub Release will be created with a generated
+# title/body based on your changelogs.
+
+name: Release
+permissions:
+  "contents": "write"
+
+# This task will run whenever you push a git tag that looks like a version,
+# like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc.
+# Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where
+# PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION
+# must be a Cargo-style SemVer Version (must have at least major.minor.patch).
+#
+# If PACKAGE_NAME is specified, then the announcement will be for that
+# package (erroring out if it doesn't have the given version or isn't dist-able).
+#
+# If PACKAGE_NAME isn't specified, then the announcement will be for all
+# (dist-able) packages in the workspace with that version (this mode is
+# intended for workspaces with only one dist-able package, or with all dist-able
+# packages versioned/released in lockstep).
+#
+# If you push multiple tags at once, separate instances of this workflow will
+# spin up, creating an independent announcement for each one. However, GitHub
+# will hard-limit this to 3 tags per commit, as it will assume more tags is a
+# mistake.
+#
+# If there's a prerelease-style suffix to the version, then the release(s)
+# will be marked as a prerelease.
+on:
+  pull_request:
+  push:
+    tags:
+      - '**[0-9]+.[0-9]+.[0-9]+*'
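dist parses these tags itself, so nothing in this repository needs to replicate it. Purely as an illustration of the scheme the comment above describes, here is a hypothetical sketch using the `regex` crate (already a dependency of this crate); the helper name and the exact pattern are assumptions, not dist's actual implementation:

```rust
use regex::Regex;

/// Hypothetical illustration of how a tag such as "v1.2.3", "my-app/0.1.0",
/// or "v0.1.0-prerelease.1" splits into an optional PACKAGE_NAME and a
/// SemVer-style VERSION. Not dist's real parser.
fn parse_release_tag(tag: &str) -> Option<(Option<String>, String)> {
    // Optional "<package>/" prefix, optional leading "v", then
    // major.minor.patch with an optional prerelease/build suffix.
    let re = Regex::new(
        r"^(?:(?P<pkg>[A-Za-z0-9_-]+)/)?v?(?P<ver>\d+\.\d+\.\d+(?:[-+][0-9A-Za-z.-]+)?)$",
    )
    .unwrap();
    let caps = re.captures(tag)?;
    Some((
        caps.name("pkg").map(|m| m.as_str().to_string()),
        caps["ver"].to_string(),
    ))
}

fn main() {
    assert_eq!(
        parse_release_tag("my-app/0.1.0"),
        Some((Some("my-app".to_string()), "0.1.0".to_string()))
    );
    assert_eq!(
        parse_release_tag("v0.1.0-prerelease.1"),
        Some((None, "0.1.0-prerelease.1".to_string()))
    );
}
```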
+
+jobs:
+  # Run 'dist plan' (or host) to determine what tasks we need to do
+  plan:
+    runs-on: "ubuntu-20.04"
+    outputs:
+      val: ${{ steps.plan.outputs.manifest }}
+      tag: ${{ !github.event.pull_request && github.ref_name || '' }}
+      tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }}
+      publishing: ${{ !github.event.pull_request }}
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: Install dist
+        # we specify bash to get pipefail; it guards against the `curl` command
+        # failing. otherwise `sh` won't catch that `curl` returned non-0
+        shell: bash
+        run: "curl --proto '=https' --tlsv1.2 -LsSf /~https://github.com/axodotdev/cargo-dist/releases/download/v0.27.0/cargo-dist-installer.sh | sh"
+      - name: Cache dist
+        uses: actions/upload-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/dist
+      # sure would be cool if github gave us proper conditionals...
+      # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible
+      # functionality based on whether this is a pull_request, and whether it's from a fork.
+      # (PRs run on the *source* but secrets are usually on the *target* -- that's *good*
+      # but also really annoying to build CI around when it needs secrets to work right.)
+      - id: plan
+        run: |
+          dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json
+          echo "dist ran successfully"
+          cat plan-dist-manifest.json
+          echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT"
+      - name: "Upload dist-manifest.json"
+        uses: actions/upload-artifact@v4
+        with:
+          name: artifacts-plan-dist-manifest
+          path: plan-dist-manifest.json
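The whole dist manifest travels between jobs as a single JSON job output (`steps.plan.outputs.manifest`), which downstream jobs unpack with `fromJson(needs.plan.outputs.val)`. As a rough sketch of what that consumption looks like, using `serde_json` (already a dependency here) against a trimmed stand-in for the manifest — the real file has many more fields, but `upload_files` is the one this workflow later reads with `jq`:

```rust
use serde_json::Value;

fn main() {
    // Trimmed stand-in for plan-dist-manifest.json; illustrative data only.
    let manifest: Value = serde_json::from_str(
        r#"{"upload_files": ["ruff-tools-x86_64-unknown-linux-gnu.tar.xz", "sha256.sum"]}"#,
    )
    .unwrap();

    // Equivalent of `jq --raw-output ".upload_files[]"`: one path per line.
    for file in manifest["upload_files"].as_array().unwrap() {
        println!("{}", file.as_str().unwrap());
    }
}
```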
+
+  # Build and package all the platform-specific things
+  build-local-artifacts:
+    name: build-local-artifacts (${{ join(matrix.targets, ', ') }})
+    # Let the initial task tell us to not run (currently very blunt)
+    needs:
+      - plan
+    if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }}
+    strategy:
+      fail-fast: false
+      # Target platforms/runners are computed by dist in create-release.
+      # Each member of the matrix has the following arguments:
+      #
+      # - runner: the github runner
+      # - dist-args: cli flags to pass to dist
+      # - install-dist: expression to run to install dist on the runner
+      #
+      # Typically there will be:
+      # - 1 "global" task that builds universal installers
+      # - N "local" tasks that build each platform's binaries and platform-specific installers
+      matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }}
+    runs-on: ${{ matrix.runner }}
+    container: ${{ matrix.container && matrix.container.image || null }}
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json
+    steps:
+      - name: enable windows longpaths
+        run: |
+          git config --global core.longpaths true
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: Install Rust non-interactively if not already installed
+        if: ${{ matrix.container }}
+        run: |
+          if ! command -v cargo > /dev/null 2>&1; then
+            curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+            echo "$HOME/.cargo/bin" >> $GITHUB_PATH
+          fi
+      - name: Install dist
+        run: ${{ matrix.install_dist.run }}
+      # Get the dist-manifest
+      - name: Fetch local artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: artifacts-*
+          path: target/distrib/
+          merge-multiple: true
+      - name: Install dependencies
+        run: |
+          ${{ matrix.packages_install }}
+      - name: Build artifacts
+        run: |
+          # Actually do builds and make zips and whatnot
+          dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json
+          echo "dist ran successfully"
+      - id: cargo-dist
+        name: Post-build
+        # We force bash here just because github makes it really hard to get values up
+        # to "real" actions without writing to env-vars, and writing to env-vars has
+        # inconsistent syntax between shell and powershell.
+        shell: bash
+        run: |
+          # Parse out what we just built and upload it to scratch storage
+          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
+          dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT"
+          echo "EOF" >> "$GITHUB_OUTPUT"
+
+          cp dist-manifest.json "$BUILD_MANIFEST_NAME"
+      - name: "Upload artifacts"
+        uses: actions/upload-artifact@v4
+        with:
+          name: artifacts-build-local-${{ join(matrix.targets, '_') }}
+          path: |
+            ${{ steps.cargo-dist.outputs.paths }}
+            ${{ env.BUILD_MANIFEST_NAME }}
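The Post-build step relies on GitHub's delimiter syntax for multi-line step outputs: append `name<<EOF`, the value's lines, and a closing `EOF` to the file named by `$GITHUB_OUTPUT`. A minimal sketch of the same protocol from Rust — the helper is hypothetical, but the file format is the one GitHub documents:

```rust
use std::env;
use std::fs::OpenOptions;
use std::io::Write;

/// Hypothetical helper: append a multi-line step output using GitHub's
/// "name<<DELIMITER" syntax, as the shell step above does with `paths`.
fn set_multiline_output(name: &str, value: &str) -> std::io::Result<()> {
    let path = env::var("GITHUB_OUTPUT").expect("set by the Actions runner");
    let mut out = OpenOptions::new().append(true).create(true).open(path)?;
    // The delimiter must not occur inside the value; "EOF" matches the workflow.
    writeln!(out, "{name}<<EOF")?;
    writeln!(out, "{value}")?;
    writeln!(out, "EOF")
}

fn main() -> std::io::Result<()> {
    set_multiline_output("paths", "dist.tar.gz\ndist.sha256")
}
```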
+
+  # Build and package all the platform-agnostic(ish) things
+  build-global-artifacts:
+    needs:
+      - plan
+      - build-local-artifacts
+    runs-on: "ubuntu-20.04"
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+      BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: Install cached dist
+        uses: actions/download-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/
+      - run: chmod +x ~/.cargo/bin/dist
+      # Get all the local artifacts for the global tasks to use (for e.g. checksums)
+      - name: Fetch local artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: artifacts-*
+          path: target/distrib/
+          merge-multiple: true
+      - id: cargo-dist
+        shell: bash
+        run: |
+          dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json
+          echo "dist ran successfully"
+
+          # Parse out what we just built and upload it to scratch storage
+          echo "paths<<EOF" >> "$GITHUB_OUTPUT"
+          jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT"
+          echo "EOF" >> "$GITHUB_OUTPUT"
+
+          cp dist-manifest.json "$BUILD_MANIFEST_NAME"
+      - name: "Upload artifacts"
+        uses: actions/upload-artifact@v4
+        with:
+          name: artifacts-build-global
+          path: |
+            ${{ steps.cargo-dist.outputs.paths }}
+            ${{ env.BUILD_MANIFEST_NAME }}
+
+  # Determines if we should publish/announce
+  host:
+    needs:
+      - plan
+      - build-local-artifacts
+      - build-global-artifacts
+    # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine)
+    if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }}
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    runs-on: "ubuntu-20.04"
+    outputs:
+      val: ${{ steps.host.outputs.manifest }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
+      - name: Install cached dist
+        uses: actions/download-artifact@v4
+        with:
+          name: cargo-dist-cache
+          path: ~/.cargo/bin/
+      - run: chmod +x ~/.cargo/bin/dist
+      # Fetch artifacts from scratch-storage
+      - name: Fetch artifacts
+        uses: actions/download-artifact@v4
+        with:
+          pattern: artifacts-*
+          path: target/distrib/
+          merge-multiple: true
+      - id: host
+        shell: bash
+        run: |
+          dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json
+          echo "artifacts uploaded and released successfully"
+          cat dist-manifest.json
+          echo "manifest=$(jq -c "." dist-manifest.json)" >> "$GITHUB_OUTPUT"
+      - name: "Upload dist-manifest.json"
+        uses: actions/upload-artifact@v4
+        with:
+          # Overwrite the previous copy
+          name: artifacts-dist-manifest
+          path: dist-manifest.json
+      # Create a GitHub Release while uploading all files to it
+      - name: "Download GitHub Artifacts"
+        uses: actions/download-artifact@v4
+        with:
+          pattern: artifacts-*
+          path: artifacts
+          merge-multiple: true
+      - name: Cleanup
+        run: |
+          # Remove the granular manifests
+          rm -f artifacts/*-dist-manifest.json
+      - name: Create GitHub Release
+        env:
+          PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}"
+          ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}"
+          ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}"
+          RELEASE_COMMIT: "${{ github.sha }}"
+        run: |
+          # Write and read notes from a file to avoid quoting breaking things
+          echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt
+
+          gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/*
+
+  announce:
+    needs:
+      - plan
+      - host
+    # use "always() && ..." to allow us to wait for all publish jobs while
+    # still allowing individual publish jobs to skip themselves (for prereleases).
+    # "host" however must run to completion, no skipping allowed!
+    if: ${{ always() && needs.host.result == 'success' }}
+    runs-on: "ubuntu-20.04"
+    env:
+      GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          submodules: recursive
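The Create GitHub Release step only passes `--prerelease` when the manifest's `announcement_is_prerelease` is set, which dist derives from the version carrying a prerelease-style suffix. A sketch of that rule under the Cargo/SemVer convention (illustrative only; dist makes this decision itself):

```rust
/// A Cargo-style SemVer version is a prerelease iff it carries a `-suffix`
/// (e.g. "0.1.0-prerelease.1"). Build metadata after '+' does not count.
fn is_prerelease(version: &str) -> bool {
    let core = version.split('+').next().unwrap_or(version);
    core.contains('-')
}

fn main() {
    let flag = if is_prerelease("0.1.0-prerelease.1") {
        "--prerelease"
    } else {
        ""
    };
    println!("gh release create ... {flag}");
    assert!(!is_prerelease("1.0.0"));
}
```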
diff --git a/Cargo.lock b/Cargo.lock
index f26fd3c..fd2bc64 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -64,12 +64,6 @@ dependencies = [
  "vec_map",
 ]
 
-[[package]]
-name = "configparser"
-version = "1.0.0"
-source = "registry+/~https://github.com/rust-lang/crates.io-index"
-checksum = "fe1d7dcda7d1da79e444bdfba1465f2f849a58b07774e1df473ee77030cb47a7"
-
 [[package]]
 name = "filetime"
 version = "0.2.25"
@@ -109,15 +103,6 @@ dependencies = [
  "libc",
 ]
 
-[[package]]
-name = "ini"
-version = "1.3.0"
-source = "registry+/~https://github.com/rust-lang/crates.io-index"
-checksum = "0a9271a5dfd4228fa56a78d7508a35c321639cc71f783bb7a5723552add87bce"
-dependencies = [
- "configparser",
-]
-
 [[package]]
 name = "inotify"
 version = "0.10.2"
@@ -332,20 +317,12 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c"
 name = "ruff-tools"
 version = "0.1.0"
 dependencies = [
- "ini",
  "notify",
  "regex",
- "rustc-serialize",
  "serde_json",
  "structopt",
 ]
 
-[[package]]
-name = "rustc-serialize"
-version = "0.3.25"
-source = "registry+/~https://github.com/rust-lang/crates.io-index"
-checksum = "fe834bc780604f4674073badbad26d7219cadfb4a2275802db12cbae17498401"
-
 [[package]]
 name = "ryu"
 version = "1.0.18"
diff --git a/Cargo.toml b/Cargo.toml
index e280260..4f3333b 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -2,11 +2,31 @@
 name = "ruff-tools"
 version = "0.1.0"
 edition = "2021"
+rust-version = "1.80"
+homepage = "/~https://github.com/purajit/ruff-tools"
+documentation = "/~https://github.com/purajit/ruff-tools"
+repository = "/~https://github.com/purajit/ruff-tools"
+authors = ["Purajit Malalur"]
+license = "MIT"
 
 [dependencies]
-ini = "1.3.0"
 notify = "7.0.0"
 regex = "1.11.1"
-rustc-serialize = "0.3.25"
 serde_json = "1.0.134"
 structopt = "0.3.26"
+
+# The profile that 'dist' will build with
+[profile.dist]
+inherits = "release"
+lto = "thin"
+
+# Config for 'dist'
+[workspace.metadata.dist]
+# The preferred dist version to use in CI (Cargo.toml SemVer syntax)
+cargo-dist-version = "0.27.0"
+# CI backends to support
+ci = "github"
+# The installers to generate for each app
+installers = []
+# Target platforms to build apps for (Rust target-triple syntax)
+targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-apple-darwin", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"]
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..cf11487
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 Purajit Malalur
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/src/cycle_detection.rs b/src/cycle_detection.rs
index 2fd830b..b332b77 100644
--- a/src/cycle_detection.rs
+++ b/src/cycle_detection.rs
@@ -36,11 +36,8 @@ pub(crate) fn detect_cycles() {
     let mut hash_vec: Vec<_> = edge_frequencies.iter().collect();
     hash_vec.sort_by(|a, b| b.1.cmp(a.1));
     println!("Most frequently-appearing imports in cycles:");
-    for i in 0..cmp::min(hash_vec.len(), 5) {
-        println!(
-            "{} {} -> {}",
-            hash_vec[i].1, hash_vec[i].0 .0, hash_vec[i].0 .1
-        );
+    for (edge, frequency) in hash_vec.iter().take(cmp::min(hash_vec.len(), 5)) {
+        println!("{} {} -> {}", frequency, edge.0, edge.1);
     }
     println!("Removing these imports \x1b[3mmight\x1b[0m help resolve several cyclic dependencies")
 }
@@ -48,9 +45,9 @@ pub(crate) fn detect_cycles() {
 fn detect_cycles_in_graph(graph: &HashMap<String, Vec<String>>) -> HashSet<Vec<&str>> {
     let mut cycles = HashSet::new();
     for vertex in graph.keys() {
-        cycles.extend(get_cycles_from_vertex(&graph, vertex));
+        cycles.extend(get_cycles_from_vertex(graph, vertex));
     }
-    return cycles;
+    cycles
 }
 
 /// This is ported from pylint's cycle detection which is rather chaotic,
@@ -67,12 +64,11 @@ fn get_cycles_from_vertex<'a>(
 
     // path, visited, node to explore
     stack.push((Vec::new(), vertex));
-    while !stack.is_empty() {
-        let (path, vertex) = stack.pop().unwrap();
+    while let Some((path, vertex)) = stack.pop() {
         match path.iter().position(|v| v == vertex) {
             Some(vertex_index) => {
                 cycles.insert(super::minimize_cycles::minimize_cycle(
-                    &graph,
+                    graph,
                     super::minimize_cycles::canonical_cycle(&path[vertex_index..]),
                 ));
             }
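The `while let Some((path, vertex)) = stack.pop()` rewrite above drops the `is_empty()`/`unwrap()` pair without changing behavior. A self-contained sketch of the same explicit-stack traversal on a toy graph — types simplified relative to the real module, and without the minimization/canonicalization steps:

```rust
use std::collections::{HashMap, HashSet};

/// Find cycles reachable from `start` with an explicit stack instead of
/// recursion, mirroring the `while let Some(..) = stack.pop()` loop above.
fn cycles_from<'a>(
    graph: &HashMap<&'a str, Vec<&'a str>>,
    start: &'a str,
) -> HashSet<Vec<&'a str>> {
    let mut cycles = HashSet::new();
    let mut stack: Vec<(Vec<&str>, &str)> = vec![(Vec::new(), start)];
    while let Some((path, vertex)) = stack.pop() {
        if let Some(i) = path.iter().position(|&v| v == vertex) {
            // The current vertex closes a loop back into the path.
            cycles.insert(path[i..].to_vec());
            continue;
        }
        let mut path = path;
        path.push(vertex);
        for &next in graph.get(vertex).into_iter().flatten() {
            stack.push((path.clone(), next));
        }
    }
    cycles
}

fn main() {
    let graph = HashMap::from([("a", vec!["b"]), ("b", vec!["c"]), ("c", vec!["a"])]);
    // a -> b -> c -> a
    assert!(cycles_from(&graph, "a").contains(&vec!["a", "b", "c"]));
}
```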
diff --git a/src/live.rs b/src/live.rs
index c038c4d..64f8776 100644
--- a/src/live.rs
+++ b/src/live.rs
@@ -32,7 +32,7 @@ pub(crate) fn run_watcher(
     watcher.watch(Path::new("."), RecursiveMode::Recursive)?;
     println!("\x1b[93mListening! Ctrl-C to quit.\x1b[0m");
     for res in rx {
-        let _ = match res {
+        match res {
             Ok(event) => match event.kind {
                 Modify(ModifyKind::Name(_))
                 | Modify(ModifyKind::Data(_))
@@ -58,7 +58,7 @@ pub(crate) fn run_watcher(
                         {
                             return Some(sp.strip_prefix(&cwd).unwrap().to_string());
                         }
-                        return None;
+                        None
                     })
                     .collect::<Vec<String>>();
 
@@ -151,17 +151,17 @@ pub(crate) fn run_watcher(
     Ok(())
 }
 
-fn get_affected_files<'a>(
-    modified_files: &Vec<String>,
+fn get_affected_files(
+    modified_files: &[String],
     import_map_dependents: HashMap<String, Vec<String>>,
 ) -> HashSet<String> {
     // run a plain BFS of the dependents graph; all visited nodes are affected files
     let mut visited: HashSet<String> = HashSet::new();
     let mut queue: VecDeque<String> = VecDeque::new();
-    visited.extend(modified_files.clone());
-    queue.extend(modified_files.clone());
+    visited.extend(modified_files.to_owned());
+    queue.extend(modified_files.to_owned());
     while let Some(file) = queue.pop_front() {
-        match import_map_dependents.get(&String::from(file)) {
+        match import_map_dependents.get(&file) {
             Some(mi) => {
                 for dependent_file in mi.iter() {
                     if visited.insert(dependent_file.clone()) {
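`get_affected_files` is a plain BFS over the dependents graph: every node reachable from a modified file is affected. The boolean returned by `visited.insert(..)` doubles as the dedup check, so each file is enqueued at most once. A self-contained sketch with toy data (the real code feeds in ruff's import graph):

```rust
use std::collections::{HashMap, HashSet, VecDeque};

/// Same shape as `get_affected_files`: BFS over the dependents graph, where
/// everything reachable from a modified file is considered affected.
fn affected(modified: &[String], dependents: &HashMap<String, Vec<String>>) -> HashSet<String> {
    let mut visited: HashSet<String> = modified.iter().cloned().collect();
    let mut queue: VecDeque<String> = modified.iter().cloned().collect();
    while let Some(file) = queue.pop_front() {
        for dep in dependents.get(&file).into_iter().flatten() {
            // `insert` returns true only the first time we see a node.
            if visited.insert(dep.clone()) {
                queue.push_back(dep.clone());
            }
        }
    }
    visited
}

fn main() {
    let dependents = HashMap::from([
        ("pkg/core.py".to_string(), vec!["pkg/api.py".to_string()]),
        ("pkg/api.py".to_string(), vec!["pkg/cli.py".to_string()]),
    ]);
    let hit = affected(&["pkg/core.py".to_string()], &dependents);
    assert!(hit.contains("pkg/cli.py")); // transitively affected
}
```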
diff --git a/src/main.rs b/src/main.rs
index 155680b..6fe6d90 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -43,7 +43,6 @@ fn main() {
         RuffTools::CycleDetection(_) => cycle_detection::detect_cycles(),
         RuffTools::Live(cmd) => {
             let _ = live::run_watcher(cmd.cmd, cmd.paths);
-            ()
         }
         _ => (),
     }
diff --git a/src/minimize_cycles.rs b/src/minimize_cycles.rs
index 259a2ef..f8fc818 100644
--- a/src/minimize_cycles.rs
+++ b/src/minimize_cycles.rs
@@ -18,15 +18,15 @@ fn cycle_size(c_len: usize, i: usize, j: usize) -> usize {
 pub(crate) fn canonical_cycle<'a>(c: &[&'a str]) -> Vec<&'a str> {
     let start_vertex = c.iter().min().unwrap();
     let start_index = c.iter().position(|v| v == start_vertex).unwrap();
-    return c[start_index..]
+    c[start_index..]
         .iter()
         .chain(&c[..start_index])
         .cloned()
-        .collect();
+        .collect()
 }
 
 /// Get the sub-cycle within c by using an edge from vertex index i to j
-fn sub_cycle<'a>(c: &Vec<&'a str>, i: usize, j: usize) -> Vec<&'a str> {
+fn sub_cycle<'a>(c: &[&'a str], i: usize, j: usize) -> Vec<&'a str> {
     let new_cycle = if i < j {
         &c[..(i + 1)]
             .iter()
             .chain(&c[j..])
             .cloned()
@@ -37,7 +37,7 @@ fn sub_cycle<'a>(c: &Vec<&'a str>, i: usize, j: usize) -> Vec<&'a str> {
         &c[j..(i + 1)]
     };
 
-    return canonical_cycle(new_cycle);
+    canonical_cycle(new_cycle)
 }
 
 pub(crate) fn minimize_cycle<'a>(
@@ -62,10 +62,10 @@ pub(crate) fn minimize_cycle<'a>(
             }
         }
     }
-    return match emsmallen {
+    match emsmallen {
         Some((i, j, _)) => sub_cycle(&cycle, i, j),
         None => cycle,
-    };
+    }
 }
 
 pub(crate) fn minimize_cycles(cycles_results_file: String) {
     let graph = super::ruff_util::ruff_graph(true, false, None);
@@ -74,10 +74,7 @@ pub(crate) fn minimize_cycles(cycles_results_file: String) {
     let contents =
         fs::read_to_string(cycles_results_file).expect("Should have been able to read the file");
     let mut cycles = contents
         .split("\n")
-        .filter_map(|l| match l.find(" -> ") {
-            Some(_) => Some(l.split(" -> ").collect()),
-            None => None,
-        })
+        .filter_map(|l| l.find(" -> ").map(|_| l.split(" -> ").collect()))
         .collect::<Vec<Vec<&str>>>();
 
     println!("Pre-minimization");
@@ -91,13 +88,9 @@ pub(crate) fn minimize_cycles(cycles_results_file: String) {
         "longest cycle : {}",
         cycles.iter().map(|c| c.len()).max().unwrap()
     );
-
     // sort cycles by length, since larger cycles are likelier to be minimized, and this
     // makes it easier to grok the results and logs
-    cycles.sort_by(|a, b| a.len().cmp(&b.len()));
-
-    // println!("GRAPH {:?}", graph);
-    // println!("CYCLES {:?}", cycles);
+    cycles.sort_by_key(|a| a.len());
 
     let mut minimal_cycles = Vec::<Vec<&str>>::new();
     for cycle in cycles {
@@ -106,6 +99,10 @@ pub(crate) fn minimize_cycles(cycles_results_file: String) {
 
     // find number of unique cycles, total length of all cycles
     let unique_minimal_cycles = minimal_cycles.iter().collect::<HashSet<_>>();
+    for cycle in &unique_minimal_cycles {
+        println!("{}", cycle.join(" -> "));
+    }
+
     println!("\nPost-minimization");
     println!("# cycles : {}", unique_minimal_cycles.len());
     println!(
@@ -117,10 +114,6 @@ pub(crate) fn minimize_cycles(cycles_results_file: String) {
         "longest cycle : {}",
         unique_minimal_cycles.iter().map(|c| c.len()).max().unwrap()
     );
-
-    for cycle in &unique_minimal_cycles {
-        println!("{}", cycle.join(" -> "));
-    }
 }
 
 #[cfg(test)]
@@ -155,23 +148,23 @@ mod tests {
     #[test]
     fn test_sub_cycle() {
         // unchanged cycle
-        assert_eq!(sub_cycle(&vec!["a", "b", "c"], 2, 0), ["a", "b", "c"]);
+        assert_eq!(sub_cycle(&["a", "b", "c"], 2, 0), ["a", "b", "c"]);
         // unchanged cycle, just canonicalized
-        assert_eq!(sub_cycle(&vec!["b", "c", "a"], 2, 0), ["a", "b", "c"]);
+        assert_eq!(sub_cycle(&["b", "c", "a"], 2, 0), ["a", "b", "c"]);
         // shortcut
-        assert_eq!(sub_cycle(&vec!["a", "b", "c"], 1, 0), ["a", "b"]);
+        assert_eq!(sub_cycle(&["a", "b", "c"], 1, 0), ["a", "b"]);
         // should shortcut
         assert_eq!(
-            sub_cycle(&vec!["b", "c", "e", "a", "d"], 2, 4),
+            sub_cycle(&["b", "c", "e", "a", "d"], 2, 4),
             ["b", "c", "e", "d"]
         );
         // should shortcut and canonicalize
         assert_eq!(
-            sub_cycle(&vec!["b", "a", "c", "e", "d"], 1, 3),
+            sub_cycle(&["b", "a", "c", "e", "d"], 1, 3),
             ["a", "e", "d", "b"]
         );
         // should get contained cycle AND canonicalize
-        assert_eq!(sub_cycle(&vec!["b", "c", "a"], 2, 1), ["a", "c"]);
+        assert_eq!(sub_cycle(&["b", "c", "a"], 2, 1), ["a", "c"]);
     }
 
     /// these have only one possible option, and just test reading edges from the graph
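`canonical_cycle` works because a cycle has no distinguished starting point: rotating it so it begins at its smallest vertex gives every rotation of the same cycle one canonical spelling, which is what lets the `HashSet` deduplicate them. A standalone sketch of that rotation (a single-pass variant; the module's version finds the minimum and its position in two passes):

```rust
/// Rotate a cycle so it begins at its lexicographically smallest vertex,
/// as `canonical_cycle` does above; ["b", "c", "a"] and ["a", "b", "c"]
/// then compare (and hash) as the same cycle.
fn canonicalize<'a>(c: &[&'a str]) -> Vec<&'a str> {
    let start = c
        .iter()
        .enumerate()
        .min_by_key(|&(_, v)| v)
        .map(|(i, _)| i)
        .unwrap_or(0);
    c[start..].iter().chain(&c[..start]).cloned().collect()
}

fn main() {
    assert_eq!(canonicalize(&["b", "c", "a"]), ["a", "b", "c"]);
    assert_eq!(canonicalize(&["a", "b", "c"]), ["a", "b", "c"]);
}
```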
diff --git a/src/ruff_util.rs b/src/ruff_util.rs
index fcc9e3a..5d5fbba 100644
--- a/src/ruff_util.rs
+++ b/src/ruff_util.rs
@@ -18,19 +18,14 @@ pub(crate) fn ruff_graph(
         } else {
             vec!["--direction", "dependents"]
         })
-        .args(if paths.is_some() {
-            paths.unwrap()
-        } else {
-            Vec::<String>::new()
-        })
+        .args(paths.unwrap_or_default())
         .output()
         .expect("failed to execute process");
 
     let j: Value =
         serde_json::from_str::<Value>(str::from_utf8(&graph_output.stdout).unwrap()).unwrap();
 
-    return j
-        .as_object()
+    j.as_object()
         .unwrap()
         .clone()
         .into_iter()
@@ -42,8 +37,8 @@ pub(crate) fn ruff_graph(
                     path_to_module(&k),
                     v.as_array()
                         .unwrap()
-                        .into_iter()
-                        .map(|i| path_to_module(&i.as_str().unwrap()))
+                        .iter()
+                        .map(|i| path_to_module(i.as_str().unwrap()))
                         .collect::<Vec<String>>(),
                 )
             } else {
@@ -51,13 +46,13 @@ pub(crate) fn ruff_graph(
                 k,
                 v.as_array()
                     .unwrap()
-                    .into_iter()
+                    .iter()
                     .map(|i| i.as_str().unwrap().to_string())
                     .collect::<Vec<String>>(),
             )
         }
     })
-    .collect::<HashMap<String, Vec<String>>>();
+    .collect::<HashMap<String, Vec<String>>>()
 }
 
 fn path_to_module(path: &str) -> String {
@@ -68,13 +63,13 @@ fn path_to_module(path: &str) -> String {
         .unwrap()
         .replace(&_module_path_with_extensions, "")
         .into();
-    return match full_module_path.find("src.") {
+    match full_module_path.find("src.") {
         Some(src_index) => {
             let start_index = src_index + 4; // "src.".len()
             full_module_path[start_index..].to_string()
         }
         None => full_module_path,
-    };
+    }
 }
 
 #[cfg(test)]
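Finally, `path_to_module` turns a file path from ruff's JSON graph into a dotted module name, stripping everything up to and including a `src.` segment (note the `src_index + 4`, where 4 is `"src.".len()`). The full function body isn't shown in this diff, so the following is a sketch of the described behavior, not the exact implementation:

```rust
/// Sketch of the conversion `path_to_module` performs: a file path becomes a
/// dotted module name, and anything up to and including a `src.` segment is
/// stripped. Edge-case handling in the real ruff_util.rs may differ.
fn path_to_module_sketch(path: &str) -> String {
    let dotted = path.trim_end_matches(".py").replace('/', ".");
    match dotted.find("src.") {
        Some(i) => dotted[i + 4..].to_string(), // 4 == "src.".len()
        None => dotted,
    }
}

fn main() {
    assert_eq!(path_to_module_sketch("project/src/pkg/mod.py"), "pkg.mod");
    assert_eq!(path_to_module_sketch("pkg/mod.py"), "pkg.mod");
}
```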