Commit
Merge pull request #75 from rusty-ecma/housekeeping
Housekeeping
FreeMasen committed Nov 24, 2022
2 parents 83fbdab + fc3ccae commit 7428d60
Showing 38 changed files with 441 additions and 1,370 deletions.
4 changes: 0 additions & 4 deletions .cargo/config

This file was deleted.

15 changes: 6 additions & 9 deletions .github/workflows/rust.yml
@@ -8,9 +8,9 @@ jobs:
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v1
- uses: actions/checkout@v3
- name: Setup Node.js for use with actions
uses: actions/setup-node@v2.4.0
uses: actions/setup-node@v3
- name: install js test libs from npm
run: npm install
- name: Build
@@ -19,35 +19,32 @@ jobs:
run: curl https://hg.mozilla.org/mozilla-central/archive/tip.zip/js/src/jit-test/tests/ --output moz-central.zip
- name: unzip moz-central
run: unzip -qq moz-central.zip -d moz-central
# - name: clone Test262
# run: git clone https://github.com/tc39/test262 ./test262_full && mv ./test262_full/test ./test262
- name: Run tests
run: cargo test --release --features=moz_central
env:
RUST_MIN_STACK: 9999999
- name: rust-tarpaulin
uses: actions-rs/tarpaulin@v0.1.0
uses: actions-rs/tarpaulin@v0.1
env:
RUST_MIN_STACK: 9999999
ACTIONS_ALLOW_UNSECURE_COMMANDS: true
with:
args: --features=moz_central
- name: Upload to codecov.io
uses: codecov/codecov-action@v1.0.2
uses: codecov/codecov-action@v3
continue-on-error: true
with:
token: ${{secrets.CODECOV_TOKEN}}
- name: Check syntax
run: cargo fmt --all -- --check
- name: Cache node_modules
uses: actions/cache@v1.0.3
uses: actions/cache@v3
with:
path: ./node_modules
key: ${{ runner.os }}.node_modules
- name: before cargo cache
run: rm -rf ~/.cargo/registry
- name: Cache cargo directory
uses: actions/cache@v1.0.3
uses: actions/cache@v3
with:
key: ${{ runner.os }}.cargo
path: ~/.cargo
36 changes: 23 additions & 13 deletions CONTRIBUTING.md
@@ -10,13 +10,33 @@ two people working on the same issue

## Testing

### Memory Issues

The parsers defined here are recursive descent parsers, meaning they rely heavily on recursion,
which can exhaust the default stack size. When running tests it is recommended to set the
environment variable `RUST_MIN_STACK` to `9999999` (seven nines). Otherwise you will probably
encounter an error like:

```sh
thread '<test-name>' has overflowed its stack
fatal runtime error: stack overflow
error: test failed, to rerun pass `--test <test>`

Caused by:
process didn't exit successfully: `<path-to-test>-<test-sha>` (signal: 6, SIGABRT: process abort signal)
```
[See this issue for more details](https://github.com/rusty-ecma/RESSA/issues/76)
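
For example (a minimal sketch in a bash-compatible shell; the stack value comes from this section and the repository's CI config, and the exact test selection is up to you):

```sh
# Give each test thread a larger stack before invoking the harness.
RUST_MIN_STACK=9999999 cargo test --release

# The same applies to the feature-gated Spider Monkey suite described below.
RUST_MIN_STACK=9999999 cargo test --release --features moz_central -- moz_central
```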

### Extra Files

There are a few sets of JavaScript files that are required to run the tests in this repository.

#### NPM files

This set can be easily acquired by running `npm install` in the root of this project.

#### Spider Monkey Files

An additional test is available behind the feature flag `moz_central`; it requires the JIT test files from the Firefox repository, which are expected to exist in a `moz-central` folder in the root of this project. To get these files you can either manually download and unzip them by following [this link](https://hg.mozilla.org/mozilla-central/archive/tip.zip/js/src/jit-test/tests/) or execute the following command.
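
The download command itself is collapsed in this diff; an equivalent of the CI steps in `.github/workflows/rust.yml` above would be:

```sh
# Fetch the Spider Monkey JIT test files and unpack them into ./moz-central
curl https://hg.mozilla.org/mozilla-central/archive/tip.zip/js/src/jit-test/tests/ --output moz-central.zip
unzip -qq moz-central.zip -d moz-central
```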
@@ -30,13 +30,3 @@ To run these tests simply execute the following command.
```sh
cargo test --features moz_central -- moz_central
```

### Test262

Another test, feature gated because of how long it takes to run, parses all 30,000+ files in the [Test262](https://github.com/tc39/test262) test suite. The expectation is that the test folder from that repository is in the root of the project with the name `test262`.

```sh
curl -L https://github.com/tc39/test262/zipball/master -o test262.zip
unzip -q test262.zip -d test262_full
mv ./test262_full/test ./test262
```
38 changes: 16 additions & 22 deletions Cargo.toml
@@ -1,50 +1,43 @@
[package]
name = "ressa"
version = "0.8.0"
authors = ["Robert Masen <r@robertmasen.pizza>"]
authors = ["Robert Masen <r.f.masen@gmail.com>"]
repository = "https://github.com/rusty-ecma/RESSA"
description = "An ECMAscript parser"
license = "MIT"
readme = "./README.md"
keywords = ["JavaScript", "parsing", "JS", "ES", "ECMA"]
categories = ["parsing", "text-processing", "web-programming"]
edition = "2018"
edition = "2021"

[dependencies]
backtrace = "0.3"
hash-chain = "0.3"
log = "0.4"
ress = "0.11"
resast = "0.5"
log = "0.4"
walkdir = { version = "2", optional = true }
indicatif = { version = "0.12", optional = true, features = ["with_rayon"] }
rayon = { version = "1", optional = true }
pulldown-cmark = { version = "0.7", optional = true }
res-regex = "0.1"
hash-chain = "0.3"
tracing = "0.1"

[features]
default = []
major_libs = []
# This feature populates a field on the Parser `_look_ahead` that will contain a debug format
# string of the look_ahead token. Very helpful when debugging this crate with gdb/lldb as sometimes
# the property shape of the `Token` isn't formatted well
debug_look_ahead = []
error_backtrace = []
serialization = ["resast/serialization"]
esprima = ["resast/esprima"]
moz_central = ["walkdir"]
test_262 = ["indicatif", "rayon", "pulldown-cmark"]
test_262_parser = ["indicatif", "rayon", "pulldown-cmark"]
# This feature disables the moz_central tests by default as they tend to run long on most
# development machines and require a larger minimum stack size to pass
moz_central = []

[dev-dependencies]
criterion = "0.3"
criterion = "0.4"
docopt = "1"
env_logger = "0.6"
env_logger = "0.9"
insta = "1.19"
lazy_static = "1"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_yaml = "0.9"
term = "0.6"
term-painter = "0.2.4"
term-painter = "0.3"
walkdir = "2"

[[bench]]
@@ -54,8 +47,9 @@ harness = false
[[example]]
name = "js-to-json"
path = "examples/js_to_json.rs"
required-features = ["serialization"]
required-features = ["resast/serialization"]

[[example]]
name = "js-to-json-esprima"
path = "examples/js_to_json-esprima.rs"
required-features = ["esprima"]
required-features = ["resast/esprima"]
15 changes: 12 additions & 3 deletions README.md
@@ -1,8 +1,10 @@
# RESSA

[![Rust](https://github.com/rusty-ecma/RESSA/workflows/Rust/badge.svg?branch=featureless_test262)](https://github.com/rusty-ecma/RESSA/actions)

[![crates.io](https://img.shields.io/crates/v/ressa.svg)](https://crates.io/crates/ressa)
[![last commit master](https://img.shields.io/github/last-commit/FreeMasen/RESSA.svg)](https://github.com/FreeMasen/RESSA/commits/master)
# RESSA

> Rust EcmaScript Syntax Analyzer
This project is part of a series of crates designed to enable developers to create JavaScript development tools using the Rust programming language. [Rusty ECMA Details](#rusty-ecma-details)
@@ -13,9 +15,13 @@ The two major pieces that users will interact with are the `Parser` struct and t

The parser struct will be the main way to convert text into an `AST`.
Conveniently, `Parser` implements `Iterator` over `Result<ProgramPart, Error>`,
which means you can evaluate your JS in pieces from top to bottom.

> Note: By default the `Parser` will not be able to handle JS module features;
> [see the module example](./examples/simple_module.rs) for details on how to parse JS modules.
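
A hedged sketch of what module parsing looks like (it assumes the builder exposes a `module(..)` toggle alongside `js(..)` and `build()`; the linked example file is the authoritative reference):

```rust
use ressa::Parser;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let js = "export const ANSWER = 42;";
    // Assumed builder API; see examples/simple_module.rs for the real call sequence.
    let parser = Parser::builder().module(true).js(js).build()?;
    for part in parser {
        // Each item is a Result<ProgramPart, Error> for one top-level piece of the module.
        println!("{:#?}", part?);
    }
    Ok(())
}
```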
### Iterator Example

```rust
use resast::prelude::*;
use ressa::*;
Expand Down Expand Up @@ -44,6 +50,7 @@ fn main() {
Another way to interact with a `Parser` would be to utilize the `parse` method. This method will iterate over all of the found `ProgramParts` and collect them into a `Program`.

### Parse Example

```rust
use ressa::{
Parser,
@@ -66,12 +73,14 @@ function Thing() {
Once you get to the inner `parts` of a `Program` you have a `Vec<ProgramPart>`, which will operate the same as the [iterator example](#iterator-example).
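
Both full examples are collapsed in this diff view; here is a minimal, hedged sketch of the `parse` route described above (assuming `Parser::new` and `parse` behave as in the published crate API):

```rust
use ressa::Parser;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let js = "function thing() { return 'stuff'; }";
    let mut parser = Parser::new(js)?;
    // `parse` drains the part iterator and collects everything into a single `Program`.
    let program = parser.parse()?;
    println!("{:#?}", program);
    Ok(())
}
```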

# Rusty ECMA Details

## The Rust ECMA Crates

- [RESS](https://github.com/freemasen/ress) - Tokenizer or Scanner
- [RESSA](https://github.com/freemasen/ressa) - Parser
- [RESAST](https://github.com/freemasen/resast) - AST
- [RESW](https://github.com/freemasen/resw) - Writer

## Why So Many?

While much of what each crate provides is closely coupled with the other crates, the main goal is to provide the largest amount of customizability. For example, someone writing a fuzzer would only need `RESAST` and `RESW`; it seems silly to require that they also pull in `RESS` and `RESSA` needlessly.
10 changes: 2 additions & 8 deletions examples/js_to_json-esprima.rs
@@ -1,5 +1,5 @@
use docopt::Docopt;
use serde::{Deserialize, Serialize};
use serde::Deserialize;
use std::{
error::Error,
ffi::OsStr,
@@ -29,12 +29,6 @@ struct Args {
flag_out: Option<PathBuf>,
}

#[cfg(not(feature = "esprima"))]
fn main() {
println!("Please run again with --features esprima");
}

#[cfg(feature = "esprima")]
fn main() -> Result<(), Box<dyn Error>> {
let args: Args = Docopt::new(USAGE)
.and_then(|o| o.deserialize())
@@ -54,7 +48,7 @@ fn main() -> Result<(), Box<dyn Error>> {
}
Ok(())
}
#[cfg(feature = "esprima")]

fn gen_json(from: PathBuf, pretty: bool) -> Result<String, Box<dyn Error>> {
let js = read_to_string(&from)?;
let mut p = Parser::builder()
9 changes: 2 additions & 7 deletions examples/js_to_json.rs
@@ -1,4 +1,5 @@
use docopt::Docopt;
use serde::Deserialize;
use std::{
error::Error,
ffi::OsStr,
@@ -28,12 +29,6 @@ struct Args {
flag_out: Option<PathBuf>,
}

#[cfg(not(feature = "serialization"))]
fn main() {
println!("Please run again with --features serialization");
}

#[cfg(feature = "serialization")]
fn main() -> Result<(), Box<dyn Error>> {
let args: Args = Docopt::new(USAGE)
.and_then(|o| o.deserialize())
@@ -53,7 +48,7 @@ fn main() -> Result<(), Box<dyn Error>> {
}
Ok(())
}
#[cfg(feature = "serialization")]

fn gen_json(from: PathBuf, pretty: bool) -> Result<String, Box<dyn Error>> {
let js = read_to_string(&from)?;
let mut p = Parser::builder()
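Both example diffs drop the `#[cfg(...)]` fallback `main` functions: with `required-features` declared in `Cargo.toml`, Cargo skips the examples unless the named features are enabled (and errors if you request one explicitly without them), so the in-source gate is redundant. A hedged invocation sketch (the feature spelling follows the new `Cargo.toml` entries):

```sh
# Build the serialization-backed example; without the feature Cargo refuses to build it
# and reports which required features are missing.
cargo build --example js-to-json --features resast/serialization

# Likewise for the esprima-flavored example.
cargo build --example js-to-json-esprima --features resast/esprima
```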
3 changes: 0 additions & 3 deletions run_test262.ps1

This file was deleted.

3 changes: 0 additions & 3 deletions run_test262.sh

This file was deleted.

File renamed without changes.
10 changes: 5 additions & 5 deletions src/formal_params.rs
@@ -24,7 +24,7 @@ pub struct FormalsList<'a> {

pub fn have_duplicates<'a>(params: &[Param<'a>]) -> bool {
if let Err(first_dupe) = find_duplicate(params) {
error!("Found duplicate parameter: {}", first_dupe);
log::error!("Found duplicate parameter: {}", first_dupe);
true
} else {
false
@@ -52,7 +52,7 @@ pub fn update_with_expr<'a>(
set: &mut HashSet<Cow<'a, str>>,
) -> Result<(), Cow<'a, str>> {
use resast::spanned::expr::{AssignExpr, AssignLeft};
trace!("update_with_expr {:?} {:?}", expr, set);
log::trace!("update_with_expr {:?} {:?}", expr, set);
match expr {
resast::spanned::expr::Expr::Ident(id) => {
if !set.insert(id.slice.source.clone()) {
@@ -87,7 +87,7 @@ pub fn update_with_pat<'a>(
pat: &resast::spanned::pat::Pat<'a>,
set: &mut HashSet<Cow<'a, str>>,
) -> Result<(), Cow<'a, str>> {
trace!("update_with_pat {:?} {:?}", pat, set);
log::trace!("update_with_pat {:?} {:?}", pat, set);
match pat {
Pat::Ident(id) => {
if !set.insert(id.slice.source.clone()) {
@@ -151,7 +151,7 @@ fn update_with_prop_value<'a>(
prop: &PropValue<'a>,
set: &mut HashSet<Cow<'a, str>>,
) -> Result<(), Cow<'a, str>> {
trace!("update_with_prop {:?}, {:?}", prop, set);
log::trace!("update_with_prop {:?}, {:?}", prop, set);
match &prop {
PropValue::Expr(expr) => {
update_with_expr(expr, set)?;
@@ -176,7 +176,7 @@ fn update_with_prop_key<'a>(
}

fn update_with_lit<'a>(lit: &Lit<'a>, set: &mut HashSet<Cow<'a, str>>) -> Result<(), Cow<'a, str>> {
trace!("update_with_lit {:?}, {:?}", lit, set);
log::trace!("update_with_lit {:?}, {:?}", lit, set);
if let Lit::String(s) = lit {
if !set.insert(s.content.source.clone()) {
return Err(s.content.source.clone());