From 125c87cdcbfe658bf7ed43ef0ec70b2e1c0d11cb Mon Sep 17 00:00:00 2001
From: Robert Masen
Date: Thu, 24 Nov 2022 11:14:53 -0600
Subject: [PATCH 01/22] chore: update setup-node action to v3

---
 .github/workflows/rust.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index ba148ff..9aaf975 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -10,7 +10,7 @@ jobs:
     steps:
     - uses: actions/checkout@v1
     - name: Setup Node.js for use with actions
-      uses: actions/setup-node@v2.4.0
+      uses: actions/setup-node@v3
     - name: install js test libs from npm
       run: npm install
     - name: Build

From 42f05d994a488ae28a8d4745b9adc409c9c82b8d Mon Sep 17 00:00:00 2001
From: Robert Masen
Date: Thu, 24 Nov 2022 11:18:51 -0600
Subject: [PATCH 02/22] chore: update checkout action to v2

---
 .github/workflows/rust.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 9aaf975..749b6dd 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-    - uses: actions/checkout@v1
+    - uses: actions/checkout@v2
     - name: Setup Node.js for use with actions
       uses: actions/setup-node@v3
     - name: install js test libs from npm

From 490f67df79615e58c2c1267495187c1f6cb5e967 Mon Sep 17 00:00:00 2001
From: Robert Masen
Date: Thu, 24 Nov 2022 11:21:03 -0600
Subject: [PATCH 03/22] chore: remove patch version from tarpaulin action

---
 .github/workflows/rust.yml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 749b6dd..08080c6 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -26,10 +26,9 @@ jobs:
       env:
         RUST_MIN_STACK: 9999999
     - name: rust-tarpaulin
-      uses: actions-rs/tarpaulin@v0.1.0
+      uses: actions-rs/tarpaulin@v0.1
       env:
         RUST_MIN_STACK: 9999999
-        ACTIONS_ALLOW_UNSECURE_COMMANDS: true
       with:
         args: --features=moz_central
     - name: Upload to codecov.io

From 234fbab19030962ad4712ab3105792ee022ddb00 Mon Sep 17 00:00:00 2001
From: Robert Masen
Date: Thu, 24 Nov 2022 11:22:22 -0600
Subject: [PATCH 04/22] chore: update codecov action to v3

---
 .github/workflows/rust.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 08080c6..d83ae3d 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -32,7 +32,7 @@ jobs:
       with:
         args: --features=moz_central
     - name: Upload to codecov.io
-      uses: codecov/codecov-action@v1.0.2
+      uses: codecov/codecov-action@v3
       continue-on-error: true
       with:
         token: ${{secrets.CODECOV_TOKEN}}

From 75165ad48f87bff085d46fa6c5faa6c655c87e26 Mon Sep 17 00:00:00 2001
From: Robert Masen
Date: Thu, 24 Nov 2022 11:27:22 -0600
Subject: [PATCH 05/22] chore: remove test262

this will be moved to its own project
---
 .github/workflows/rust.yml      |   2 -
 tests/test262/addLineNumbers.js |  30 --
 tests/test262/hilight.js        |   2 -
 tests/test262/main.rs           | 870 --------------------------------
 tests/test262/removeNegative.js |  26 -
 tests/test262/style.css         |  54 --
 6 files changed, 984 deletions(-)
 delete mode 100644 tests/test262/addLineNumbers.js
 delete mode 100644 tests/test262/hilight.js
 delete mode 100644 tests/test262/main.rs
 delete mode 100644 tests/test262/removeNegative.js
 delete mode 100644 tests/test262/style.css

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index d83ae3d..68d0e37 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -19,8 +19,6 @@ jobs:
       run: curl https://hg.mozilla.org/mozilla-central/archive/tip.zip/js/src/jit-test/tests/ --output moz-central.zip
     - name: unzip moz-central
       run: unzip -qq moz-central.zip -d moz-central
-    # - name: clone Test262
-    #   run: git clone https://github.com/tc39/test262 ./test262_full && mv ./test262_full/test ./test262
     - name: Run tests
       run: cargo test --release --features=moz_central
       env:
diff --git a/tests/test262/addLineNumbers.js b/tests/test262/addLineNumbers.js
deleted file mode 100644
index e84b9b2..0000000
--- a/tests/test262/addLineNumbers.js
+++ /dev/null
@@ -1,30 +0,0 @@
-function addNumbers() {
-    const codeEl = document.querySelector('.language-js');
-    let lineCount = codeEl.textContent.split('\n').length;
-    let gutterWidth = lineCount > 99 ? 50 : 25;
-    const gutter = document.createElement('div');
-    gutter.setAttribute('class', 'code-gutter');
-    document.body.appendChild(gutter);
-    gutter.style.top = codeEl.offsetTop;
-    gutter.style.height = codeEl.clientHeight;
-    gutter.style.top = codeEl.offsetTop;
-    gutter.style.left = codeEl.offsetLeft - gutterWidth;
-    gutter.style.width = `${gutterWidth}px`;
-    for (var i = 0; i < lineCount - 1; i++) {
-        let num = document.createElement('span');
-        num.innerText = `${i + 1}.`;
-        num.classList.add('gutter-number');
-        gutter.appendChild(num);
-    }
-}
-function highlight() {
-    const blocks = document.querySelectorAll('pre code')
-    for (const block of blocks) {
-        hljs.highlightBlock(block);
-    }
-}
-
-window.addEventListener('DOMContentLoaded', () => {
-    highlight();
-    addNumbers();
-});
\ No newline at end of file
diff --git a/tests/test262/hilight.js b/tests/test262/hilight.js
deleted file mode 100644
index 9586bd2..0000000
--- a/tests/test262/hilight.js
+++ /dev/null
@@ -1,2 +0,0 @@
-/*! 
highlight.js v9.15.10 | BSD3 License | git.io/hljslicense */ -!function(e){var t="object"==typeof window&&window||"object"==typeof self&&self;"undefined"==typeof exports||exports.nodeType?t&&(t.hljs=e({}),"function"==typeof define&&define.amd&&define([],function(){return t.hljs})):e(exports)}(function(n){var d=[],o=Object.keys,h={},c={},t=/^(no-?highlight|plain|text)$/i,l=/\blang(?:uage)?-([\w-]+)\b/i,r=/((^(<[^>]+>|\t|)+|(?:\n)))/gm,a={case_insensitive:"cI",lexemes:"l",contains:"c",keywords:"k",subLanguage:"sL",className:"cN",begin:"b",beginKeywords:"bK",end:"e",endsWithParent:"eW",illegal:"i",excludeBegin:"eB",excludeEnd:"eE",returnBegin:"rB",returnEnd:"rE",relevance:"r",variants:"v",IDENT_RE:"IR",UNDERSCORE_IDENT_RE:"UIR",NUMBER_RE:"NR",C_NUMBER_RE:"CNR",BINARY_NUMBER_RE:"BNR",RE_STARTERS_RE:"RSR",BACKSLASH_ESCAPE:"BE",APOS_STRING_MODE:"ASM",QUOTE_STRING_MODE:"QSM",PHRASAL_WORDS_MODE:"PWM",C_LINE_COMMENT_MODE:"CLCM",C_BLOCK_COMMENT_MODE:"CBCM",HASH_COMMENT_MODE:"HCM",NUMBER_MODE:"NM",C_NUMBER_MODE:"CNM",BINARY_NUMBER_MODE:"BNM",CSS_NUMBER_MODE:"CSSNM",REGEXP_MODE:"RM",TITLE_MODE:"TM",UNDERSCORE_TITLE_MODE:"UTM",COMMENT:"C",beginRe:"bR",endRe:"eR",illegalRe:"iR",lexemesRe:"lR",terminators:"t",terminator_end:"tE"},N="",v={classPrefix:"hljs-",tabReplace:null,useBR:!1,languages:void 0};function y(e){return e.replace(/&/g,"&").replace(//g,">")}function b(e){return e.nodeName.toLowerCase()}function w(e,t){var r=e&&e.exec(t);return r&&0===r.index}function u(e){return t.test(e)}function p(e){var t,r={},a=Array.prototype.slice.call(arguments,1);for(t in e)r[t]=e[t];return a.forEach(function(e){for(t in e)r[t]=e[t]}),r}function m(e){var n=[];return function e(t,r){for(var a=t.firstChild;a;a=a.nextSibling)3===a.nodeType?r+=a.nodeValue.length:1===a.nodeType&&(n.push({event:"start",offset:r,node:a}),r=e(a,r),b(a).match(/br|hr|img|input/)||n.push({event:"stop",offset:r,node:a}));return r}(e,0),n}function i(e){if(a&&!e.langApiRestored){for(var t in e.langApiRestored=!0,a)e[t]&&(e[a[t]]=e[t]);(e.c||[]).concat(e.v||[]).forEach(i)}}function E(s){function l(e){return e&&e.source||e}function c(e,t){return new RegExp(l(e),"m"+(s.cI?"i":"")+(t?"g":""))}!function t(r,e){if(!r.compiled){if(r.compiled=!0,r.k=r.k||r.bK,r.k){function a(r,e){s.cI&&(e=e.toLowerCase()),e.split(" ").forEach(function(e){var t=e.split("|");n[t[0]]=[r,t[1]?Number(t[1]):1]})}var n={};"string"==typeof r.k?a("keyword",r.k):o(r.k).forEach(function(e){a(e,r.k[e])}),r.k=n}r.lR=c(r.l||/\w+/,!0),e&&(r.bK&&(r.b="\\b("+r.bK.split(" ").join("|")+")\\b"),r.b||(r.b=/\B|\b/),r.bR=c(r.b),r.endSameAsBegin&&(r.e=r.b),r.e||r.eW||(r.e=/\B|\b/),r.e&&(r.eR=c(r.e)),r.tE=l(r.e)||"",r.eW&&e.tE&&(r.tE+=(r.e?"|":"")+e.tE)),r.i&&(r.iR=c(r.i)),null==r.r&&(r.r=1),r.c||(r.c=[]),r.c=Array.prototype.concat.apply([],r.c.map(function(e){return function(t){return t.v&&!t.cached_variants&&(t.cached_variants=t.v.map(function(e){return p(t,{v:null},e)})),t.cached_variants||t.eW&&[p(t)]||[t]}("self"===e?r:e)})),r.c.forEach(function(e){t(e,r)}),r.starts&&t(r.starts,e);var i=r.c.map(function(e){return e.bK?"\\.?(?:"+e.b+")\\.?":e.b}).concat([r.tE,r.i]).map(l).filter(Boolean);r.t=i.length?c(function(e,t){for(var r=/\[(?:[^\\\]]|\\.)*\]|\(\??|\\([1-9][0-9]*)|\\./,a=0,n="",i=0;i')+t+(r?"":N):t}function s(){b+=null!=u.sL?function(){var e="string"==typeof u.sL;if(e&&!h[u.sL])return y(p);var t=e?k(u.sL,p,!0,d[u.sL]):x(p,u.sL.length?u.sL:void 0);return 0")+'"');return p+=t,t.length||1}var l=M(e);if(!l)throw new Error('Unknown language: "'+e+'"');E(l);var 
n,u=r||l,d={},b="";for(n=u;n!==l;n=n.parent)n.cN&&(b=c(n.cN,"",!0)+b);var p="",m=0;try{for(var f,g,_=0;u.t.lastIndex=_,f=u.t.exec(t);)g=a(t.substring(_,f.index),f[0]),_=f.index+g;for(a(t.substr(_)),n=u;n.parent;n=n.parent)n.cN&&(b+=N);return{r:m,value:b,language:e,top:u}}catch(e){if(e.message&&-1!==e.message.indexOf("Illegal"))return{r:0,value:y(t)};throw e}}function x(r,e){e=e||v.languages||o(h);var a={r:0,value:y(r)},n=a;return e.filter(M).filter(_).forEach(function(e){var t=k(e,r,!1);t.language=e,t.r>n.r&&(n=t),t.r>a.r&&(n=a,a=t)}),n.language&&(a.second_best=n),a}function f(e){return v.tabReplace||v.useBR?e.replace(r,function(e,t){return v.useBR&&"\n"===e?"
":v.tabReplace?t.replace(/\t/g,v.tabReplace):""}):e}function s(e){var t,r,a,n,i,s=function(e){var t,r,a,n,i=e.className+" ";if(i+=e.parentNode?e.parentNode.className:"",r=l.exec(i))return M(r[1])?r[1]:"no-highlight";for(t=0,a=(i=i.split(/\s+/)).length;t/g,"\n"):t=e,i=t.textContent,a=s?k(s,i,!0):x(i),(r=m(t)).length&&((n=document.createElementNS("http://www.w3.org/1999/xhtml","div")).innerHTML=a.value,a.value=function(e,t,r){var a=0,n="",i=[];function s(){return e.length&&t.length?e[0].offset!==t[0].offset?e[0].offset"}function o(e){n+=""}function l(e){("start"===e.event?c:o)(e.node)}for(;e.length||t.length;){var u=s();if(n+=y(r.substring(a,u[0].offset)),a=u[0].offset,u===e){for(i.reverse().forEach(o);l(u.splice(0,1)[0]),(u=s())===e&&u.length&&u[0].offset===a;);i.reverse().forEach(c)}else"start"===u[0].event?i.push(u[0].node):i.pop(),l(u.splice(0,1)[0])}return n+y(r.substr(a))}(r,m(n),i)),a.value=f(a.value),e.innerHTML=a.value,e.className=function(e,t,r){var a=t?c[t]:r,n=[e.trim()];return e.match(/\bhljs\b/)||n.push("hljs"),-1===e.indexOf(a)&&n.push(a),n.join(" ").trim()}(e.className,s,a.language),e.result={language:a.language,re:a.r},a.second_best&&(e.second_best={language:a.second_best.language,re:a.second_best.r}))}function g(){if(!g.called){g.called=!0;var e=document.querySelectorAll("pre code");d.forEach.call(e,s)}}function M(e){return e=(e||"").toLowerCase(),h[e]||h[c[e]]}function _(e){var t=M(e);return t&&!t.disableAutodetect}return n.highlight=k,n.highlightAuto=x,n.fixMarkup=f,n.highlightBlock=s,n.configure=function(e){v=p(v,e)},n.initHighlighting=g,n.initHighlightingOnLoad=function(){addEventListener("DOMContentLoaded",g,!1),addEventListener("load",g,!1)},n.registerLanguage=function(t,e){var r=h[t]=e(n);i(r),r.aliases&&r.aliases.forEach(function(e){c[e]=t})},n.listLanguages=function(){return o(h)},n.getLanguage=M,n.autoDetection=_,n.inherit=p,n.IR=n.IDENT_RE="[a-zA-Z]\\w*",n.UIR=n.UNDERSCORE_IDENT_RE="[a-zA-Z_]\\w*",n.NR=n.NUMBER_RE="\\b\\d+(\\.\\d+)?",n.CNR=n.C_NUMBER_RE="(-?)(\\b0[xX][a-fA-F0-9]+|(\\b\\d+(\\.\\d*)?|\\.\\d+)([eE][-+]?\\d+)?)",n.BNR=n.BINARY_NUMBER_RE="\\b(0b[01]+)",n.RSR=n.RE_STARTERS_RE="!|!=|!==|%|%=|&|&&|&=|\\*|\\*=|\\+|\\+=|,|-|-=|/=|/|:|;|<<|<<=|<=|<|===|==|=|>>>=|>>=|>=|>>>|>>|>|\\?|\\[|\\{|\\(|\\^|\\^=|\\||\\|=|\\|\\||~",n.BE=n.BACKSLASH_ESCAPE={b:"\\\\[\\s\\S]",r:0},n.ASM=n.APOS_STRING_MODE={cN:"string",b:"'",e:"'",i:"\\n",c:[n.BE]},n.QSM=n.QUOTE_STRING_MODE={cN:"string",b:'"',e:'"',i:"\\n",c:[n.BE]},n.PWM=n.PHRASAL_WORDS_MODE={b:/\b(a|an|the|are|I'm|isn't|don't|doesn't|won't|but|just|should|pretty|simply|enough|gonna|going|wtf|so|such|will|you|your|they|like|more)\b/},n.C=n.COMMENT=function(e,t,r){var a=n.inherit({cN:"comment",b:e,e:t,c:[]},r||{});return a.c.push(n.PWM),a.c.push({cN:"doctag",b:"(?:TODO|FIXME|NOTE|BUG|XXX):",r:0}),a},n.CLCM=n.C_LINE_COMMENT_MODE=n.C("//","$"),n.CBCM=n.C_BLOCK_COMMENT_MODE=n.C("/\\*","\\*/"),n.HCM=n.HASH_COMMENT_MODE=n.C("#","$"),n.NM=n.NUMBER_MODE={cN:"number",b:n.NR,r:0},n.CNM=n.C_NUMBER_MODE={cN:"number",b:n.CNR,r:0},n.BNM=n.BINARY_NUMBER_MODE={cN:"number",b:n.BNR,r:0},n.CSSNM=n.CSS_NUMBER_MODE={cN:"number",b:n.NR+"(%|em|ex|ch|rem|vw|vh|vmin|vmax|cm|mm|in|pt|pc|px|deg|grad|rad|turn|s|ms|Hz|kHz|dpi|dpcm|dppx)?",r:0},n.RM=n.REGEXP_MODE={cN:"regexp",b:/\//,e:/\/[gimuy]*/,i:/\n/,c:[n.BE,{b:/\[/,e:/\]/,r:0,c:[n.BE]}]},n.TM=n.TITLE_MODE={cN:"title",b:n.IR,r:0},n.UTM=n.UNDERSCORE_TITLE_MODE={cN:"title",b:n.UIR,r:0},n.METHOD_GUARD={b:"\\.\\s*"+n.UIR,r:0},n.registerLanguage("apache",function(e){var 
t={cN:"number",b:"[\\$%]\\d+"};return{aliases:["apacheconf"],cI:!0,c:[e.HCM,{cN:"section",b:""},{cN:"attribute",b:/\w+/,r:0,k:{nomarkup:"order deny allow setenv rewriterule rewriteengine rewritecond documentroot sethandler errordocument loadmodule options header listen serverroot servername"},starts:{e:/$/,r:0,k:{literal:"on off all"},c:[{cN:"meta",b:"\\s\\[",e:"\\]$"},{cN:"variable",b:"[\\$%]\\{",e:"\\}",c:["self",t]},t,e.QSM]}}],i:/\S/}}),n.registerLanguage("bash",function(e){var t={cN:"variable",v:[{b:/\$[\w\d#@][\w\d_]*/},{b:/\$\{(.*?)}/}]},r={cN:"string",b:/"/,e:/"/,c:[e.BE,t,{cN:"variable",b:/\$\(/,e:/\)/,c:[e.BE]}]};return{aliases:["sh","zsh"],l:/\b-?[a-z\._]+\b/,k:{keyword:"if then else elif fi for while in do done case esac function",literal:"true false",built_in:"break cd continue eval exec exit export getopts hash pwd readonly return shift test times trap umask unset alias bind builtin caller command declare echo enable help let local logout mapfile printf read readarray source type typeset ulimit unalias set shopt autoload bg bindkey bye cap chdir clone comparguments compcall compctl compdescribe compfiles compgroups compquote comptags comptry compvalues dirs disable disown echotc echoti emulate fc fg float functions getcap getln history integer jobs kill limit log noglob popd print pushd pushln rehash sched setcap setopt stat suspend ttyctl unfunction unhash unlimit unsetopt vared wait whence where which zcompile zformat zftp zle zmodload zparseopts zprof zpty zregexparse zsocket zstyle ztcp",_:"-ne -eq -lt -gt -f -d -e -s -l -a"},c:[{cN:"meta",b:/^#![^\n]+sh\s*$/,r:10},{cN:"function",b:/\w[\w\d_]*\s*\(\s*\)\s*\{/,rB:!0,c:[e.inherit(e.TM,{b:/\w[\w\d_]*/})],r:0},e.HCM,r,{cN:"",b:/\\"/},{cN:"string",b:/'/,e:/'/},t]}}),n.registerLanguage("coffeescript",function(e){var t={keyword:"in if for while finally new do return else break catch instanceof throw try this switch continue typeof delete debugger super yield import export from as default await then unless until loop of by when and or is isnt not",literal:"true false null undefined yes no on off",built_in:"npm require console print module global window document"},r="[A-Za-z$_][0-9A-Za-z$_]*",a={cN:"subst",b:/#\{/,e:/}/,k:t},n=[e.BNM,e.inherit(e.CNM,{starts:{e:"(\\s*/)?",r:0}}),{cN:"string",v:[{b:/'''/,e:/'''/,c:[e.BE]},{b:/'/,e:/'/,c:[e.BE]},{b:/"""/,e:/"""/,c:[e.BE,a]},{b:/"/,e:/"/,c:[e.BE,a]}]},{cN:"regexp",v:[{b:"///",e:"///",c:[a,e.HCM]},{b:"//[gim]*",r:0},{b:/\/(?![ *])(\\\/|.)*?\/[gim]*(?=\W|$)/}]},{b:"@"+r},{sL:"javascript",eB:!0,eE:!0,v:[{b:"```",e:"```"},{b:"`",e:"`"}]}];a.c=n;var i=e.inherit(e.TM,{b:r}),s="(\\(.*\\))?\\s*\\B[-=]>",c={cN:"params",b:"\\([^\\(]",rB:!0,c:[{b:/\(/,e:/\)/,k:t,c:["self"].concat(n)}]};return{aliases:["coffee","cson","iced"],k:t,i:/\/\*/,c:n.concat([e.C("###","###"),e.HCM,{cN:"function",b:"^\\s*"+r+"\\s*=\\s*"+s,e:"[-=]>",rB:!0,c:[i,c]},{b:/[:\(,=]\s*/,r:0,c:[{cN:"function",b:s,e:"[-=]>",rB:!0,c:[c]}]},{cN:"class",bK:"class",e:"$",i:/[:="\[\]]/,c:[{bK:"extends",eW:!0,i:/[:="\[\]]/,c:[i]},i]},{b:r+":",e:":",rB:!0,rE:!0,r:0}])}}),n.registerLanguage("cpp",function(e){var t={cN:"keyword",b:"\\b[a-z\\d_]*_t\\b"},r={cN:"string",v:[{b:'(u8?|U|L)?"',e:'"',i:"\\n",c:[e.BE]},{b:/(?:u8?|U|L)?R"([^()\\ 
]{0,16})\((?:.|\n)*?\)\1"/},{b:"'\\\\?.",e:"'",i:"."}]},a={cN:"number",v:[{b:"\\b(0b[01']+)"},{b:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{b:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],r:0},n={cN:"meta",b:/#\s*[a-z]+\b/,e:/$/,k:{"meta-keyword":"if else elif endif define undef warning error line pragma ifdef ifndef include"},c:[{b:/\\\n/,r:0},e.inherit(r,{cN:"meta-string"}),{cN:"meta-string",b:/<[^\n>]*>/,e:/$/,i:"\\n"},e.CLCM,e.CBCM]},i=e.IR+"\\s*\\(",s={keyword:"int float while private char catch import module export virtual operator sizeof dynamic_cast|10 typedef const_cast|10 const for static_cast|10 union namespace unsigned long volatile static protected bool template mutable if public friend do goto auto void enum else break extern using asm case typeid short reinterpret_cast|10 default double register explicit signed typename try this switch continue inline delete alignof constexpr decltype noexcept static_assert thread_local restrict _Bool complex _Complex _Imaginary atomic_bool atomic_char atomic_schar atomic_uchar atomic_short atomic_ushort atomic_int atomic_uint atomic_long atomic_ulong atomic_llong atomic_ullong new throw return and or not",built_in:"std string cin cout cerr clog stdin stdout stderr stringstream istringstream ostringstream auto_ptr deque list queue stack vector map set bitset multiset multimap unordered_set unordered_map unordered_multiset unordered_multimap array shared_ptr abort abs acos asin atan2 atan calloc ceil cosh cos exit exp fabs floor fmod fprintf fputs free frexp fscanf isalnum isalpha iscntrl isdigit isgraph islower isprint ispunct isspace isupper isxdigit tolower toupper labs ldexp log10 log malloc realloc memchr memcmp memcpy memset modf pow printf putchar puts scanf sinh sin snprintf sprintf sqrt sscanf strcat strchr strcmp strcpy strcspn strlen strncat strncmp strncpy strpbrk strrchr strspn strstr tanh tan vfprintf vprintf vsprintf endl initializer_list unique_ptr",literal:"true false nullptr NULL"},c=[t,e.CLCM,e.CBCM,a,r];return{aliases:["c","cc","h","c++","h++","hpp","hh","hxx","cxx"],k:s,i:"",k:s,c:["self",t]},{b:e.IR+"::",k:s},{v:[{b:/=/,e:/;/},{b:/\(/,e:/\)/},{bK:"new throw return else",e:/;/}],k:s,c:c.concat([{b:/\(/,e:/\)/,k:s,c:c.concat(["self"]),r:0}]),r:0},{cN:"function",b:"("+e.IR+"[\\*&\\s]+)+"+i,rB:!0,e:/[{;=]/,eE:!0,k:s,i:/[^\w\s\*&]/,c:[{b:i,rB:!0,c:[e.TM],r:0},{cN:"params",b:/\(/,e:/\)/,k:s,r:0,c:[e.CLCM,e.CBCM,r,a,t,{b:/\(/,e:/\)/,k:s,r:0,c:["self",e.CLCM,e.CBCM,r,a,t]}]},e.CLCM,e.CBCM,n]},{cN:"class",bK:"class struct",e:/[{;:]/,c:[{b://,c:["self"]},e.TM]}]),exports:{preprocessor:n,strings:r,k:s}}}),n.registerLanguage("cs",function(e){var t={keyword:"abstract as base bool break byte case catch char checked const continue decimal default delegate do double enum event explicit extern finally fixed float for foreach goto if implicit in int interface internal is lock long nameof object operator out override params private protected public readonly ref sbyte sealed short sizeof stackalloc static string struct switch this try typeof uint ulong unchecked unsafe ushort using virtual void volatile while add alias ascending async await by descending dynamic equals from get global group into join let on orderby partial remove select set value var where yield",literal:"null false 
true"},r={cN:"number",v:[{b:"\\b(0b[01']+)"},{b:"(-?)\\b([\\d']+(\\.[\\d']*)?|\\.[\\d']+)(u|U|l|L|ul|UL|f|F|b|B)"},{b:"(-?)(\\b0[xX][a-fA-F0-9']+|(\\b[\\d']+(\\.[\\d']*)?|\\.[\\d']+)([eE][-+]?[\\d']+)?)"}],r:0},a={cN:"string",b:'@"',e:'"',c:[{b:'""'}]},n=e.inherit(a,{i:/\n/}),i={cN:"subst",b:"{",e:"}",k:t},s=e.inherit(i,{i:/\n/}),c={cN:"string",b:/\$"/,e:'"',i:/\n/,c:[{b:"{{"},{b:"}}"},e.BE,s]},o={cN:"string",b:/\$@"/,e:'"',c:[{b:"{{"},{b:"}}"},{b:'""'},i]},l=e.inherit(o,{i:/\n/,c:[{b:"{{"},{b:"}}"},{b:'""'},s]});i.c=[o,c,a,e.ASM,e.QSM,r,e.CBCM],s.c=[l,c,n,e.ASM,e.QSM,r,e.inherit(e.CBCM,{i:/\n/})];var u={v:[o,c,a,e.ASM,e.QSM]},d=e.IR+"(<"+e.IR+"(\\s*,\\s*"+e.IR+")*>)?(\\[\\])?";return{aliases:["csharp","c#"],k:t,i:/::/,c:[e.C("///","$",{rB:!0,c:[{cN:"doctag",v:[{b:"///",r:0},{b:"\x3c!--|--\x3e"},{b:""}]}]}),e.CLCM,e.CBCM,{cN:"meta",b:"#",e:"$",k:{"meta-keyword":"if else elif endif define undef warning error line region endregion pragma checksum"}},u,r,{bK:"class interface",e:/[{;=]/,i:/[^\s:,]/,c:[e.TM,e.CLCM,e.CBCM]},{bK:"namespace",e:/[{;=]/,i:/[^\s:]/,c:[e.inherit(e.TM,{b:"[a-zA-Z](\\.?\\w)*"}),e.CLCM,e.CBCM]},{cN:"meta",b:"^\\s*\\[",eB:!0,e:"\\]",eE:!0,c:[{cN:"meta-string",b:/"/,e:/"/}]},{bK:"new return throw await else",r:0},{cN:"function",b:"("+d+"\\s+)+"+e.IR+"\\s*\\(",rB:!0,e:/\s*[{;=]/,eE:!0,k:t,c:[{b:e.IR+"\\s*\\(",rB:!0,c:[e.TM],r:0},{cN:"params",b:/\(/,e:/\)/,eB:!0,eE:!0,k:t,r:0,c:[u,r,e.CBCM]},e.CLCM,e.CBCM]}]}}),n.registerLanguage("css",function(e){var t={b:/(?:[A-Z\_\.\-]+|--[a-zA-Z0-9_-]+)\s*:/,rB:!0,e:";",eW:!0,c:[{cN:"attribute",b:/\S/,e:":",eE:!0,starts:{eW:!0,eE:!0,c:[{b:/[\w-]+\(/,rB:!0,c:[{cN:"built_in",b:/[\w-]+/},{b:/\(/,e:/\)/,c:[e.ASM,e.QSM]}]},e.CSSNM,e.QSM,e.ASM,e.CBCM,{cN:"number",b:"#[0-9A-Fa-f]+"},{cN:"meta",b:"!important"}]}}]};return{cI:!0,i:/[=\/|'\$]/,c:[e.CBCM,{cN:"selector-id",b:/#[A-Za-z0-9_-]+/},{cN:"selector-class",b:/\.[A-Za-z0-9_-]+/},{cN:"selector-attr",b:/\[/,e:/\]/,i:"$"},{cN:"selector-pseudo",b:/:(:)?[a-zA-Z0-9\_\-\+\(\)"'.]+/},{b:"@(font-face|page)",l:"[a-z-]+",k:"font-face page"},{b:"@",e:"[{;]",i:/:/,c:[{cN:"keyword",b:/\w+/},{b:/\s/,eW:!0,eE:!0,r:0,c:[e.ASM,e.QSM,e.CSSNM]}]},{cN:"selector-tag",b:"[a-zA-Z-][a-zA-Z0-9_-]*",r:0},{b:"{",e:"}",i:/\S/,c:[e.CBCM,t]}]}}),n.registerLanguage("diff",function(e){return{aliases:["patch"],c:[{cN:"meta",r:10,v:[{b:/^@@ +\-\d+,\d+ +\+\d+,\d+ +@@$/},{b:/^\*\*\* +\d+,\d+ +\*\*\*\*$/},{b:/^\-\-\- +\d+,\d+ +\-\-\-\-$/}]},{cN:"comment",v:[{b:/Index: /,e:/$/},{b:/={3,}/,e:/$/},{b:/^\-{3}/,e:/$/},{b:/^\*{3} /,e:/$/},{b:/^\+{3}/,e:/$/},{b:/\*{5}/,e:/\*{5}$/}]},{cN:"addition",b:"^\\+",e:"$"},{cN:"deletion",b:"^\\-",e:"$"},{cN:"addition",b:"^\\!",e:"$"}]}}),n.registerLanguage("http",function(e){var t="HTTP/[0-9\\.]+";return{aliases:["https"],i:"\\S",c:[{b:"^"+t,e:"$",c:[{cN:"number",b:"\\b\\d{3}\\b"}]},{b:"^[A-Z]+ (.*?) 
"+t+"$",rB:!0,e:"$",c:[{cN:"string",b:" ",e:" ",eB:!0,eE:!0},{b:t},{cN:"keyword",b:"[A-Z]+"}]},{cN:"attribute",b:"^\\w",e:": ",eE:!0,i:"\\n|\\s|=",starts:{e:"$",r:0}},{b:"\\n\\n",starts:{sL:[],eW:!0}}]}}),n.registerLanguage("ini",function(e){var t={cN:"string",c:[e.BE],v:[{b:"'''",e:"'''",r:10},{b:'"""',e:'"""',r:10},{b:'"',e:'"'},{b:"'",e:"'"}]};return{aliases:["toml"],cI:!0,i:/\S/,c:[e.C(";","$"),e.HCM,{cN:"section",b:/^\s*\[+/,e:/\]+/},{b:/^[a-z0-9\[\]_\.-]+\s*=\s*/,e:"$",rB:!0,c:[{cN:"attr",b:/[a-z0-9\[\]_\.-]+/},{b:/=/,eW:!0,r:0,c:[e.C(";","$"),e.HCM,{cN:"literal",b:/\bon|off|true|false|yes|no\b/},{cN:"variable",v:[{b:/\$[\w\d"][\w\d_]*/},{b:/\$\{(.*?)}/}]},t,{cN:"number",b:/([\+\-]+)?[\d]+_[\d_]+/},e.NM]}]}]}}),n.registerLanguage("java",function(e){var t="false synchronized int abstract float private char boolean var static null if const for true while long strictfp finally protected import native final void enum else break transient catch instanceof byte super volatile case assert short package default double public try this switch continue throws protected public private module requires exports do",r={cN:"number",b:"\\b(0[bB]([01]+[01_]+[01]+|[01]+)|0[xX]([a-fA-F0-9]+[a-fA-F0-9_]+[a-fA-F0-9]+|[a-fA-F0-9]+)|(([\\d]+[\\d_]+[\\d]+|[\\d]+)(\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))?|\\.([\\d]+[\\d_]+[\\d]+|[\\d]+))([eE][-+]?\\d+)?)[lLfF]?",r:0};return{aliases:["jsp"],k:t,i:/<\/|#/,c:[e.C("/\\*\\*","\\*/",{r:0,c:[{b:/\w+@/,r:0},{cN:"doctag",b:"@[A-Za-z]+"}]}),e.CLCM,e.CBCM,e.ASM,e.QSM,{cN:"class",bK:"class interface",e:/[{;=]/,eE:!0,k:"class interface",i:/[:"\[\]]/,c:[{bK:"extends implements"},e.UTM]},{bK:"new throw return else",r:0},{cN:"function",b:"([À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(<[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*(\\s*,\\s*[À-ʸa-zA-Z_$][À-ʸa-zA-Z_$0-9]*)*>)?\\s+)+"+e.UIR+"\\s*\\(",rB:!0,e:/[{;=]/,eE:!0,k:t,c:[{b:e.UIR+"\\s*\\(",rB:!0,r:0,c:[e.UTM]},{cN:"params",b:/\(/,e:/\)/,k:t,r:0,c:[e.ASM,e.QSM,e.CNM,e.CBCM]},e.CLCM,e.CBCM]},r,{cN:"meta",b:"@[A-Za-z]+"}]}}),n.registerLanguage("javascript",function(e){var t="[A-Za-z$_][0-9A-Za-z$_]*",r={keyword:"in of if for while finally var new function do return void else break catch instanceof with throw case default try this switch continue typeof delete let yield const export super debugger as async await static import from as",literal:"true false null undefined NaN Infinity",built_in:"eval isFinite isNaN parseFloat parseInt decodeURI decodeURIComponent encodeURI encodeURIComponent escape unescape Object Function Boolean Error EvalError InternalError RangeError ReferenceError StopIteration SyntaxError TypeError URIError Number Math Date String RegExp Array Float32Array Float64Array Int16Array Int32Array Int8Array Uint16Array Uint32Array Uint8Array Uint8ClampedArray ArrayBuffer DataView JSON Intl arguments require module console window document Symbol Set Map WeakSet WeakMap Proxy Reflect Promise"},a={cN:"number",v:[{b:"\\b(0[bB][01]+)"},{b:"\\b(0[oO][0-7]+)"},{b:e.CNR}],r:0},n={cN:"subst",b:"\\$\\{",e:"\\}",k:r,c:[]},i={b:"html`",e:"",starts:{e:"`",rE:!1,c:[e.BE,n],sL:"xml"}},s={b:"css`",e:"",starts:{e:"`",rE:!1,c:[e.BE,n],sL:"css"}},c={cN:"string",b:"`",e:"`",c:[e.BE,n]};n.c=[e.ASM,e.QSM,i,s,c,a,e.RM];var o=n.c.concat([e.CBCM,e.CLCM]);return{aliases:["js","jsx"],k:r,c:[{cN:"meta",r:10,b:/^\s*['"]use (strict|asm)['"]/},{cN:"meta",b:/^#!/,e:/$/},e.ASM,e.QSM,i,s,c,e.CLCM,e.CBCM,a,{b:/[{,]\s*/,r:0,c:[{b:t+"\\s*:",rB:!0,r:0,c:[{cN:"attr",b:t,r:0}]}]},{b:"("+e.RSR+"|\\b(case|return|throw)\\b)\\s*",k:"return throw 
case",c:[e.CLCM,e.CBCM,e.RM,{cN:"function",b:"(\\(.*?\\)|"+t+")\\s*=>",rB:!0,e:"\\s*=>",c:[{cN:"params",v:[{b:t},{b:/\(\s*\)/},{b:/\(/,e:/\)/,eB:!0,eE:!0,k:r,c:o}]}]},{cN:"",b:/\s/,e:/\s*/,skip:!0},{b://,sL:"xml",c:[{b:/<[A-Za-z0-9\\._:-]+\s*\/>/,skip:!0},{b:/<[A-Za-z0-9\\._:-]+/,e:/(\/[A-Za-z0-9\\._:-]+|[A-Za-z0-9\\._:-]+\/)>/,skip:!0,c:[{b:/<[A-Za-z0-9\\._:-]+\s*\/>/,skip:!0},"self"]}]}],r:0},{cN:"function",bK:"function",e:/\{/,eE:!0,c:[e.inherit(e.TM,{b:t}),{cN:"params",b:/\(/,e:/\)/,eB:!0,eE:!0,c:o}],i:/\[|%/},{b:/\$[(.]/},e.METHOD_GUARD,{cN:"class",bK:"class",e:/[{;=]/,eE:!0,i:/[:"\[\]]/,c:[{bK:"extends"},e.UTM]},{bK:"constructor get set",e:/\{/,eE:!0}],i:/#(?!!)/}}),n.registerLanguage("json",function(e){var t={literal:"true false null"},r=[e.QSM,e.CNM],a={e:",",eW:!0,eE:!0,c:r,k:t},n={b:"{",e:"}",c:[{cN:"attr",b:/"/,e:/"/,c:[e.BE],i:"\\n"},e.inherit(a,{b:/:/})],i:"\\S"},i={b:"\\[",e:"\\]",c:[e.inherit(a)],i:"\\S"};return r.splice(r.length,0,n,i),{c:r,k:t,i:"\\S"}}),n.registerLanguage("makefile",function(e){var t={cN:"variable",v:[{b:"\\$\\("+e.UIR+"\\)",c:[e.BE]},{b:/\$[@%`]+/}]}]}]};return{aliases:["html","xhtml","rss","atom","xjb","xsd","xsl","plist","wsf"],cI:!0,c:[{cN:"meta",b:"",r:10,c:[{b:"\\[",e:"\\]"}]},e.C("\x3c!--","--\x3e",{r:10}),{b:"<\\!\\[CDATA\\[",e:"\\]\\]>",r:10},{cN:"meta",b:/<\?xml/,e:/\?>/,r:10},{b:/<\?(php)?/,e:/\?>/,sL:"php",c:[{b:"/\\*",e:"\\*/",skip:!0},{b:'b"',e:'"',skip:!0},{b:"b'",e:"'",skip:!0},e.inherit(e.ASM,{i:null,cN:null,c:null,skip:!0}),e.inherit(e.QSM,{i:null,cN:null,c:null,skip:!0})]},{cN:"tag",b:"|$)",e:">",k:{name:"style"},c:[t],starts:{e:"",rE:!0,sL:["css","xml"]}},{cN:"tag",b:"|$)",e:">",k:{name:"script"},c:[t],starts:{e:"<\/script>",rE:!0,sL:["actionscript","javascript","handlebars","xml","vbscript"]}},{cN:"tag",b:"",c:[{cN:"name",b:/[^\/><\s]+/,r:0},t]}]}}),n.registerLanguage("markdown",function(e){return{aliases:["md","mkdown","mkd"],c:[{cN:"section",v:[{b:"^#{1,6}",e:"$"},{b:"^.+?\\n[=-]{2,}$"}]},{b:"<",e:">",sL:"xml",r:0},{cN:"bullet",b:"^\\s*([*+-]|(\\d+\\.))\\s+"},{cN:"strong",b:"[*_]{2}.+?[*_]{2}"},{cN:"emphasis",v:[{b:"\\*.+?\\*"},{b:"_.+?_",r:0}]},{cN:"quote",b:"^>\\s+",e:"$"},{cN:"code",v:[{b:"^```w*s*$",e:"^```s*$"},{b:"`.+?`"},{b:"^( {4}|\t)",e:"$",r:0}]},{b:"^[-\\*]{3,}",e:"$"},{b:"\\[.+?\\][\\(\\[].*?[\\)\\]]",rB:!0,c:[{cN:"string",b:"\\[",e:"\\]",eB:!0,rE:!0,r:0},{cN:"link",b:"\\]\\(",e:"\\)",eB:!0,eE:!0},{cN:"symbol",b:"\\]\\[",e:"\\]",eB:!0,eE:!0}],r:10},{b:/^\[[^\n]+\]:/,rB:!0,c:[{cN:"symbol",b:/\[/,e:/\]/,eB:!0,eE:!0},{cN:"link",b:/:\s*/,e:/$/,eB:!0}]}]}}),n.registerLanguage("nginx",function(e){var t={cN:"variable",v:[{b:/\$\d+/},{b:/\$\{/,e:/}/},{b:"[\\$\\@]"+e.UIR}]},r={eW:!0,l:"[a-z/_]+",k:{literal:"on off yes no true false none blocked debug info notice warn error crit select break last permanent redirect kqueue rtsig epoll poll /dev/poll"},r:0,i:"=>",c:[e.HCM,{cN:"string",c:[e.BE,t],v:[{b:/"/,e:/"/},{b:/'/,e:/'/}]},{b:"([a-z]+):/",e:"\\s",eW:!0,eE:!0,c:[t]},{cN:"regexp",c:[e.BE,t],v:[{b:"\\s\\^",e:"\\s|{|;",rE:!0},{b:"~\\*?\\s+",e:"\\s|{|;",rE:!0},{b:"\\*(\\.[a-z\\-]+)+"},{b:"([a-z\\-]+\\.)+\\*"}]},{cN:"number",b:"\\b\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d{1,5})?\\b"},{cN:"number",b:"\\b\\d+[kKmMgGdshdwy]*\\b",r:0},t]};return{aliases:["nginxconf"],c:[e.HCM,{b:e.UIR+"\\s+{",rB:!0,e:"{",c:[{cN:"section",b:e.UIR}],r:0},{b:e.UIR+"\\s",e:";|{",rB:!0,c:[{cN:"attribute",b:e.UIR,starts:r}],r:0}],i:"[^\\s\\}]"}}),n.registerLanguage("objectivec",function(e){var t=/[a-zA-Z@][a-zA-Z0-9_]*/,r="@interface @class @protocol 
@implementation";return{aliases:["mm","objc","obj-c"],k:{keyword:"int float while char export sizeof typedef const struct for union unsigned long volatile static bool mutable if do return goto void enum else break extern asm case short default double register explicit signed typename this switch continue wchar_t inline readonly assign readwrite self @synchronized id typeof nonatomic super unichar IBOutlet IBAction strong weak copy in out inout bycopy byref oneway __strong __weak __block __autoreleasing @private @protected @public @try @property @end @throw @catch @finally @autoreleasepool @synthesize @dynamic @selector @optional @required @encode @package @import @defs @compatibility_alias __bridge __bridge_transfer __bridge_retained __bridge_retain __covariant __contravariant __kindof _Nonnull _Nullable _Null_unspecified __FUNCTION__ __PRETTY_FUNCTION__ __attribute__ getter setter retain unsafe_unretained nonnull nullable null_unspecified null_resettable class instancetype NS_DESIGNATED_INITIALIZER NS_UNAVAILABLE NS_REQUIRES_SUPER NS_RETURNS_INNER_POINTER NS_INLINE NS_AVAILABLE NS_DEPRECATED NS_ENUM NS_OPTIONS NS_SWIFT_UNAVAILABLE NS_ASSUME_NONNULL_BEGIN NS_ASSUME_NONNULL_END NS_REFINED_FOR_SWIFT NS_SWIFT_NAME NS_SWIFT_NOTHROW NS_DURING NS_HANDLER NS_ENDHANDLER NS_VALUERETURN NS_VOIDRETURN",literal:"false true FALSE TRUE nil YES NO NULL",built_in:"BOOL dispatch_once_t dispatch_queue_t dispatch_sync dispatch_async dispatch_once"},l:t,i:""}]}]},{cN:"class",b:"("+r.split(" ").join("|")+")\\b",e:"({|$)",eE:!0,k:r,l:t,c:[e.UTM]},{b:"\\."+e.UIR,r:0}]}}),n.registerLanguage("perl",function(e){var t="getpwent getservent quotemeta msgrcv scalar kill dbmclose undef lc ma syswrite tr send umask sysopen shmwrite vec qx utime local oct semctl localtime readpipe do return format read sprintf dbmopen pop getpgrp not getpwnam rewinddir qqfileno qw endprotoent wait sethostent bless s|0 opendir continue each sleep endgrent shutdown dump chomp connect getsockname die socketpair close flock exists index shmgetsub for endpwent redo lstat msgctl setpgrp abs exit select print ref gethostbyaddr unshift fcntl syscall goto getnetbyaddr join gmtime symlink semget splice x|0 getpeername recv log setsockopt cos last reverse gethostbyname getgrnam study formline endhostent times chop length gethostent getnetent pack getprotoent getservbyname rand mkdir pos chmod y|0 substr endnetent printf next open msgsnd readdir use unlink getsockopt getpriority rindex wantarray hex system getservbyport endservent int chr untie rmdir prototype tell listen fork shmread ucfirst setprotoent else sysseek link getgrgid shmctl waitpid unpack getnetbyname reset chdir grep split require caller lcfirst until warn while values shift telldir getpwuid my getprotobynumber delete and sort uc defined srand accept package seekdir getprotobyname semop our rename seek if q|0 chroot sysread setpwent no crypt getc chown sqrt write setnetent setpriority foreach tie sin msgget map stat getlogin unless elsif truncate exec keys glob tied closedirioctl socket readlink eval xor readline binmode setservent eof ord bind alarm pipe atan2 getgrent exp time push setgrent gt lt or ne m|0 break given say state 
when",r={cN:"subst",b:"[$@]\\{",e:"\\}",k:t},a={b:"->{",e:"}"},n={v:[{b:/\$\d/},{b:/[\$%@](\^\w\b|#\w+(::\w+)*|{\w+}|\w+(::\w*)*)/},{b:/[\$%@][^\s\w{]/,r:0}]},i=[e.BE,r,n],s=[n,e.HCM,e.C("^\\=\\w","\\=cut",{eW:!0}),a,{cN:"string",c:i,v:[{b:"q[qwxr]?\\s*\\(",e:"\\)",r:5},{b:"q[qwxr]?\\s*\\[",e:"\\]",r:5},{b:"q[qwxr]?\\s*\\{",e:"\\}",r:5},{b:"q[qwxr]?\\s*\\|",e:"\\|",r:5},{b:"q[qwxr]?\\s*\\<",e:"\\>",r:5},{b:"qw\\s+q",e:"q",r:5},{b:"'",e:"'",c:[e.BE]},{b:'"',e:'"'},{b:"`",e:"`",c:[e.BE]},{b:"{\\w+}",c:[],r:0},{b:"-?\\w+\\s*\\=\\>",c:[],r:0}]},{cN:"number",b:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",r:0},{b:"(\\/\\/|"+e.RSR+"|\\b(split|return|print|reverse|grep)\\b)\\s*",k:"split return print reverse grep",r:0,c:[e.HCM,{cN:"regexp",b:"(s|tr|y)/(\\\\.|[^/])*/(\\\\.|[^/])*/[a-z]*",r:10},{cN:"regexp",b:"(m|qr)?/",e:"/[a-z]*",c:[e.BE],r:0}]},{cN:"function",bK:"sub",e:"(\\s*\\(.*?\\))?[;{]",eE:!0,r:5,c:[e.TM]},{b:"-\\w\\b",r:0},{b:"^__DATA__$",e:"^__END__$",sL:"mojolicious",c:[{b:"^@@.*",e:"$",cN:"comment"}]}];return r.c=s,{aliases:["pl","pm"],l:/[\w\.]+/,k:t,c:a.c=s}}),n.registerLanguage("php",function(e){var t={b:"\\$+[a-zA-Z_-ÿ][a-zA-Z0-9_-ÿ]*"},r={cN:"meta",b:/<\?(php)?|\?>/},a={cN:"string",c:[e.BE,r],v:[{b:'b"',e:'"'},{b:"b'",e:"'"},e.inherit(e.ASM,{i:null}),e.inherit(e.QSM,{i:null})]},n={v:[e.BNM,e.CNM]};return{aliases:["php","php3","php4","php5","php6","php7"],cI:!0,k:"and include_once list abstract global private echo interface as static endswitch array null if endwhile or const for endforeach self var while isset public protected exit foreach throw elseif include __FILE__ empty require_once do xor return parent clone use __CLASS__ __LINE__ else break print eval new catch __METHOD__ case exception default die require __FUNCTION__ enddeclare final try switch continue endfor endif declare unset true false trait goto instanceof insteadof __DIR__ __NAMESPACE__ yield finally",c:[e.HCM,e.C("//","$",{c:[r]}),e.C("/\\*","\\*/",{c:[{cN:"doctag",b:"@[A-Za-z]+"}]}),e.C("__halt_compiler.+?;",!1,{eW:!0,k:"__halt_compiler",l:e.UIR}),{cN:"string",b:/<<<['"]?\w+['"]?$/,e:/^\w+;?$/,c:[e.BE,{cN:"subst",v:[{b:/\$\w+/},{b:/\{\$/,e:/\}/}]}]},r,{cN:"keyword",b:/\$this\b/},t,{b:/(::|->)+[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*/},{cN:"function",bK:"function",e:/[;{]/,eE:!0,i:"\\$|\\[|%",c:[e.UTM,{cN:"params",b:"\\(",e:"\\)",c:["self",t,e.CBCM,a,n]}]},{cN:"class",bK:"class interface",e:"{",eE:!0,i:/[:\(\$"]/,c:[{bK:"extends implements"},e.UTM]},{bK:"namespace",e:";",i:/[\.']/,c:[e.UTM]},{bK:"use",e:";",c:[e.UTM]},{b:"=>"},a,n]}}),n.registerLanguage("properties",function(e){var t="[ \\t\\f]*",r="("+t+"[:=]"+t+"|[ \\t\\f]+)",a="([^\\\\\\W:= \\t\\f\\n]|\\\\.)+",n="([^\\\\:= \\t\\f\\n]|\\\\.)+",i={e:r,r:0,starts:{cN:"string",e:/$/,r:0,c:[{b:"\\\\\\n"}]}};return{cI:!0,i:/\S/,c:[e.C("^\\s*[!#]","$"),{b:a+r,rB:!0,c:[{cN:"attr",b:a,endsParent:!0,r:0}],starts:i},{b:n+r,rB:!0,r:0,c:[{cN:"meta",b:n,endsParent:!0,r:0}],starts:i},{cN:"attr",r:0,b:n+t+"$"}]}}),n.registerLanguage("python",function(e){var t={keyword:"and elif is global as in if from raise for except finally print import pass return exec else break not with class assert yield try while continue del or def lambda async await nonlocal|10",built_in:"Ellipsis NotImplemented",literal:"False None True"},r={cN:"meta",b:/^(>>>|\.\.\.) 
/},a={cN:"subst",b:/\{/,e:/\}/,k:t,i:/#/},n={cN:"string",c:[e.BE],v:[{b:/(u|b)?r?'''/,e:/'''/,c:[e.BE,r],r:10},{b:/(u|b)?r?"""/,e:/"""/,c:[e.BE,r],r:10},{b:/(fr|rf|f)'''/,e:/'''/,c:[e.BE,r,a]},{b:/(fr|rf|f)"""/,e:/"""/,c:[e.BE,r,a]},{b:/(u|r|ur)'/,e:/'/,r:10},{b:/(u|r|ur)"/,e:/"/,r:10},{b:/(b|br)'/,e:/'/},{b:/(b|br)"/,e:/"/},{b:/(fr|rf|f)'/,e:/'/,c:[e.BE,a]},{b:/(fr|rf|f)"/,e:/"/,c:[e.BE,a]},e.ASM,e.QSM]},i={cN:"number",r:0,v:[{b:e.BNR+"[lLjJ]?"},{b:"\\b(0o[0-7]+)[lLjJ]?"},{b:e.CNR+"[lLjJ]?"}]},s={cN:"params",b:/\(/,e:/\)/,c:["self",r,i,n]};return a.c=[n,i,r],{aliases:["py","gyp","ipython"],k:t,i:/(<\/|->|\?)|=>/,c:[r,i,n,e.HCM,{v:[{cN:"function",bK:"def"},{cN:"class",bK:"class"}],e:/:/,i:/[${=;\n,]/,c:[e.UTM,s,{b:/->/,eW:!0,k:"None"}]},{cN:"meta",b:/^[\t ]*@/,e:/$/},{b:/\b(print|exec)\(/}]}}),n.registerLanguage("ruby",function(e){var t="[a-zA-Z_]\\w*[!?=]?|[-+~]\\@|<<|>>|=~|===?|<=>|[<>]=?|\\*\\*|[-/+%^&*~`|]|\\[\\]=?",r={keyword:"and then defined module in return redo if BEGIN retry end for self when next until do begin unless END rescue else break undef not super class case require yield alias while ensure elsif or include attr_reader attr_writer attr_accessor",literal:"true false nil"},a={cN:"doctag",b:"@[A-Za-z]+"},n={b:"#<",e:">"},i=[e.C("#","$",{c:[a]}),e.C("^\\=begin","^\\=end",{c:[a],r:10}),e.C("^__END__","\\n$")],s={cN:"subst",b:"#\\{",e:"}",k:r},c={cN:"string",c:[e.BE,s],v:[{b:/'/,e:/'/},{b:/"/,e:/"/},{b:/`/,e:/`/},{b:"%[qQwWx]?\\(",e:"\\)"},{b:"%[qQwWx]?\\[",e:"\\]"},{b:"%[qQwWx]?{",e:"}"},{b:"%[qQwWx]?<",e:">"},{b:"%[qQwWx]?/",e:"/"},{b:"%[qQwWx]?%",e:"%"},{b:"%[qQwWx]?-",e:"-"},{b:"%[qQwWx]?\\|",e:"\\|"},{b:/\B\?(\\\d{1,3}|\\x[A-Fa-f0-9]{1,2}|\\u[A-Fa-f0-9]{4}|\\?\S)\b/},{b:/<<[-~]?'?(\w+)(?:.|\n)*?\n\s*\1\b/,rB:!0,c:[{b:/<<[-~]?'?/},{b:/\w+/,endSameAsBegin:!0,c:[e.BE,s]}]}]},o={cN:"params",b:"\\(",e:"\\)",endsParent:!0,k:r},l=[c,n,{cN:"class",bK:"class module",e:"$|;",i:/=/,c:[e.inherit(e.TM,{b:"[A-Za-z_]\\w*(::\\w+)*(\\?|\\!)?"}),{b:"<\\s*",c:[{b:"("+e.IR+"::)?"+e.IR}]}].concat(i)},{cN:"function",bK:"def",e:"$|;",c:[e.inherit(e.TM,{b:t}),o].concat(i)},{b:e.IR+"::"},{cN:"symbol",b:e.UIR+"(\\!|\\?)?:",r:0},{cN:"symbol",b:":(?!\\s)",c:[c,{b:t}],r:0},{cN:"number",b:"(\\b0[0-7_]+)|(\\b0x[0-9a-fA-F_]+)|(\\b[1-9][0-9_]*(\\.[0-9_]+)?)|[0_]\\b",r:0},{b:"(\\$\\W)|((\\$|\\@\\@?)(\\w+))"},{cN:"params",b:/\|/,e:/\|/,k:r},{b:"("+e.RSR+"|unless)\\s*",k:"unless",c:[n,{cN:"regexp",c:[e.BE,s],i:/\n/,v:[{b:"/",e:"/[a-z]*"},{b:"%r{",e:"}[a-z]*"},{b:"%r\\(",e:"\\)[a-z]*"},{b:"%r!",e:"![a-z]*"},{b:"%r\\[",e:"\\][a-z]*"}]}].concat(i),r:0}].concat(i);s.c=l;var u=[{b:/^\s*=>/,starts:{e:"$",c:o.c=l}},{cN:"meta",b:"^([>?]>|[\\w#]+\\(\\w+\\):\\d+:\\d+>|(\\w+-)?\\d+\\.\\d+\\.\\d(p\\d+)?[^>]+>)",starts:{e:"$",c:l}}];return{aliases:["rb","gemspec","podspec","thor","irb"],k:r,i:/\/\*/,c:i.concat(u).concat(l)}}),n.registerLanguage("shell",function(e){return{aliases:["console"],c:[{cN:"meta",b:"^\\s{0,3}[\\w\\d\\[\\]()@-]*[>%$#]",starts:{e:"$",sL:"bash"}}]}}),n.registerLanguage("sql",function(e){var t=e.C("--","$");return{cI:!0,i:/[<>{}*]/,c:[{bK:"begin end start commit rollback savepoint lock alter create drop rename call delete do handler insert load replace select truncate update set show pragma grant merge describe use explain help declare prepare execute deallocate release unlock purge reset change stop analyze cache flush optimize repair kill install uninstall checksum restore check backup revoke comment values with",e:/;/,eW:!0,l:/[\w\.]+/,k:{keyword:"as abort abs absolute acc acce accep accept 
access accessed accessible account acos action activate add addtime admin administer advanced advise aes_decrypt aes_encrypt after agent aggregate ali alia alias all allocate allow alter always analyze ancillary and anti any anydata anydataset anyschema anytype apply archive archived archivelog are as asc ascii asin assembly assertion associate asynchronous at atan atn2 attr attri attrib attribu attribut attribute attributes audit authenticated authentication authid authors auto autoallocate autodblink autoextend automatic availability avg backup badfile basicfile before begin beginning benchmark between bfile bfile_base big bigfile bin binary_double binary_float binlog bit_and bit_count bit_length bit_or bit_xor bitmap blob_base block blocksize body both bound bucket buffer_cache buffer_pool build bulk by byte byteordermark bytes cache caching call calling cancel capacity cascade cascaded case cast catalog category ceil ceiling chain change changed char_base char_length character_length characters characterset charindex charset charsetform charsetid check checksum checksum_agg child choose chr chunk class cleanup clear client clob clob_base clone close cluster_id cluster_probability cluster_set clustering coalesce coercibility col collate collation collect colu colum column column_value columns columns_updated comment commit compact compatibility compiled complete composite_limit compound compress compute concat concat_ws concurrent confirm conn connec connect connect_by_iscycle connect_by_isleaf connect_by_root connect_time connection consider consistent constant constraint constraints constructor container content contents context contributors controlfile conv convert convert_tz corr corr_k corr_s corresponding corruption cos cost count count_big counted covar_pop covar_samp cpu_per_call cpu_per_session crc32 create creation critical cross cube cume_dist curdate current current_date current_time current_timestamp current_user cursor curtime customdatum cycle data database databases datafile datafiles datalength date_add date_cache date_format date_sub dateadd datediff datefromparts datename datepart datetime2fromparts day day_to_second dayname dayofmonth dayofweek dayofyear days db_role_change dbtimezone ddl deallocate declare decode decompose decrement decrypt deduplicate def defa defau defaul default defaults deferred defi defin define degrees delayed delegate delete delete_all delimited demand dense_rank depth dequeue des_decrypt des_encrypt des_key_file desc descr descri describ describe descriptor deterministic diagnostics difference dimension direct_load directory disable disable_all disallow disassociate discardfile disconnect diskgroup distinct distinctrow distribute distributed div do document domain dotnet double downgrade drop dumpfile duplicate duration each edition editionable editions element ellipsis else elsif elt empty enable enable_all enclosed encode encoding encrypt end end-exec endian enforced engine engines enqueue enterprise entityescaping eomonth error errors escaped evalname evaluate event eventdata events except exception exceptions exchange exclude excluding execu execut execute exempt exists exit exp expire explain explode export export_set extended extent external external_1 external_2 externally extract failed failed_login_attempts failover failure far fast feature_set feature_value fetch field fields file file_name_convert filesystem_like_logging final finish first first_value fixed flash_cache flashback floor flush following follows for forall force 
foreign form forma format found found_rows freelist freelists freepools fresh from from_base64 from_days ftp full function general generated get get_format get_lock getdate getutcdate global global_name globally go goto grant grants greatest group group_concat group_id grouping grouping_id groups gtid_subtract guarantee guard handler hash hashkeys having hea head headi headin heading heap help hex hierarchy high high_priority hosts hour hours http id ident_current ident_incr ident_seed identified identity idle_time if ifnull ignore iif ilike ilm immediate import in include including increment index indexes indexing indextype indicator indices inet6_aton inet6_ntoa inet_aton inet_ntoa infile initial initialized initially initrans inmemory inner innodb input insert install instance instantiable instr interface interleaved intersect into invalidate invisible is is_free_lock is_ipv4 is_ipv4_compat is_not is_not_null is_used_lock isdate isnull isolation iterate java join json json_exists keep keep_duplicates key keys kill language large last last_day last_insert_id last_value lateral lax lcase lead leading least leaves left len lenght length less level levels library like like2 like4 likec limit lines link list listagg little ln load load_file lob lobs local localtime localtimestamp locate locator lock locked log log10 log2 logfile logfiles logging logical logical_reads_per_call logoff logon logs long loop low low_priority lower lpad lrtrim ltrim main make_set makedate maketime managed management manual map mapping mask master master_pos_wait match matched materialized max maxextents maximize maxinstances maxlen maxlogfiles maxloghistory maxlogmembers maxsize maxtrans md5 measures median medium member memcompress memory merge microsecond mid migration min minextents minimum mining minus minute minutes minvalue missing mod mode model modification modify module monitoring month months mount move movement multiset mutex name name_const names nan national native natural nav nchar nclob nested never new newline next nextval no no_write_to_binlog noarchivelog noaudit nobadfile nocheck nocompress nocopy nocycle nodelay nodiscardfile noentityescaping noguarantee nokeep nologfile nomapping nomaxvalue nominimize nominvalue nomonitoring none noneditionable nonschema noorder nopr nopro noprom nopromp noprompt norely noresetlogs noreverse normal norowdependencies noschemacheck noswitch not nothing notice notnull notrim novalidate now nowait nth_value nullif nulls num numb numbe nvarchar nvarchar2 object ocicoll ocidate ocidatetime ociduration ociinterval ociloblocator ocinumber ociref ocirefcursor ocirowid ocistring ocitype oct octet_length of off offline offset oid oidindex old on online only opaque open operations operator optimal optimize option optionally or oracle oracle_date oradata ord ordaudio orddicom orddoc order ordimage ordinality ordvideo organization orlany orlvary out outer outfile outline output over overflow overriding package pad parallel parallel_enable parameters parent parse partial partition partitions pascal passing password password_grace_time password_lock_time password_reuse_max password_reuse_time password_verify_function patch path patindex pctincrease pctthreshold pctused pctversion percent percent_rank percentile_cont percentile_disc performance period period_add period_diff permanent physical pi pipe pipelined pivot pluggable plugin policy position post_transaction pow power pragma prebuilt precedes preceding precision prediction prediction_cost prediction_details 
prediction_probability prediction_set prepare present preserve prior priority private private_sga privileges procedural procedure procedure_analyze processlist profiles project prompt protection public publishingservername purge quarter query quick quiesce quota quotename radians raise rand range rank raw read reads readsize rebuild record records recover recovery recursive recycle redo reduced ref reference referenced references referencing refresh regexp_like register regr_avgx regr_avgy regr_count regr_intercept regr_r2 regr_slope regr_sxx regr_sxy reject rekey relational relative relaylog release release_lock relies_on relocate rely rem remainder rename repair repeat replace replicate replication required reset resetlogs resize resource respect restore restricted result result_cache resumable resume retention return returning returns reuse reverse revoke right rlike role roles rollback rolling rollup round row row_count rowdependencies rowid rownum rows rtrim rules safe salt sample save savepoint sb1 sb2 sb4 scan schema schemacheck scn scope scroll sdo_georaster sdo_topo_geometry search sec_to_time second seconds section securefile security seed segment select self semi sequence sequential serializable server servererror session session_user sessions_per_user set sets settings sha sha1 sha2 share shared shared_pool short show shrink shutdown si_averagecolor si_colorhistogram si_featurelist si_positionalcolor si_stillimage si_texture siblings sid sign sin size size_t sizes skip slave sleep smalldatetimefromparts smallfile snapshot some soname sort soundex source space sparse spfile split sql sql_big_result sql_buffer_result sql_cache sql_calc_found_rows sql_small_result sql_variant_property sqlcode sqldata sqlerror sqlname sqlstate sqrt square standalone standby start starting startup statement static statistics stats_binomial_test stats_crosstab stats_ks_test stats_mode stats_mw_test stats_one_way_anova stats_t_test_ stats_t_test_indep stats_t_test_one stats_t_test_paired stats_wsr_test status std stddev stddev_pop stddev_samp stdev stop storage store stored str str_to_date straight_join strcmp strict string struct stuff style subdate subpartition subpartitions substitutable substr substring subtime subtring_index subtype success sum suspend switch switchoffset switchover sync synchronous synonym sys sys_xmlagg sysasm sysaux sysdate sysdatetimeoffset sysdba sysoper system system_user sysutcdatetime table tables tablespace tablesample tan tdo template temporary terminated tertiary_weights test than then thread through tier ties time time_format time_zone timediff timefromparts timeout timestamp timestampadd timestampdiff timezone_abbr timezone_minute timezone_region to to_base64 to_date to_days to_seconds todatetimeoffset trace tracking transaction transactional translate translation treat trigger trigger_nestlevel triggers trim truncate try_cast try_convert try_parse type ub1 ub2 ub4 ucase unarchived unbounded uncompress under undo unhex unicode uniform uninstall union unique unix_timestamp unknown unlimited unlock unnest unpivot unrecoverable unsafe unsigned until untrusted unusable unused update updated upgrade upped upper upsert url urowid usable usage use use_stored_outlines user user_data user_resources users using utc_date utc_timestamp uuid uuid_short validate validate_password_strength validation valist value values var var_samp varcharc vari varia variab variabl variable variables variance varp varraw varrawc varray verify version versions view virtual visible void wait wallet 
warning warnings week weekday weekofyear wellformed when whene whenev wheneve whenever where while whitespace window with within without work wrapped xdb xml xmlagg xmlattributes xmlcast xmlcolattval xmlelement xmlexists xmlforest xmlindex xmlnamespaces xmlpi xmlquery xmlroot xmlschema xmlserialize xmltable xmltype xor year year_to_month years yearweek",literal:"true false null unknown",built_in:"array bigint binary bit blob bool boolean char character date dec decimal float int int8 integer interval number numeric real record serial serial8 smallint text time timestamp tinyint varchar varying void"},c:[{cN:"string",b:"'",e:"'",c:[e.BE,{b:"''"}]},{cN:"string",b:'"',e:'"',c:[e.BE,{b:'""'}]},{cN:"string",b:"`",e:"`",c:[e.BE]},e.CNM,e.CBCM,t,e.HCM]},e.CBCM,t,e.HCM]}}),n.registerLanguage("yaml",function(e){var t="true false yes no null",r="^[ \\-]*",a="[a-zA-Z_][\\w\\-]*",n={cN:"attr",v:[{b:r+a+":"},{b:r+'"'+a+'":'},{b:r+"'"+a+"':"}]},i={cN:"string",r:0,v:[{b:/'/,e:/'/},{b:/"/,e:/"/},{b:/\S+/}],c:[e.BE,{cN:"template-variable",v:[{b:"{{",e:"}}"},{b:"%{",e:"}"}]}]};return{cI:!0,aliases:["yml","YAML","yaml"],c:[n,{cN:"meta",b:"^---s*$",r:10},{cN:"string",b:"[\\|>] *$",rE:!0,c:i.c,e:n.v[0].b},{b:"<%[%=-]?",e:"[%-]?%>",sL:"ruby",eB:!0,eE:!0,r:0},{cN:"type",b:"!"+e.UIR},{cN:"type",b:"!!"+e.UIR},{cN:"meta",b:"&"+e.UIR+"$"},{cN:"meta",b:"\\*"+e.UIR+"$"},{cN:"bullet",b:"^ *-",r:0},e.HCM,{bK:t,k:{literal:t}},e.CNM,i]}}),n}); \ No newline at end of file diff --git a/tests/test262/main.rs b/tests/test262/main.rs deleted file mode 100644 index 8fe5690..0000000 --- a/tests/test262/main.rs +++ /dev/null @@ -1,870 +0,0 @@ -#![cfg(feature = "test_262")] - -use indicatif::{ParallelProgressIterator, ProgressBar, ProgressStyle}; -use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; -use ressa::Parser; -use serde::{Deserialize, Serialize}; -use std::fs::File; -use std::io::BufWriter; -use std::{ - error::Error, - path::{Path, PathBuf}, -}; -static INCLUDED_FEATURES: &[&str] = &[ - "IsHTMLDDA", - "String.prototype.endsWith", - "Array.prototype.flat", - "template", - "super", - "for-of", - "Float64Array", - "Reflect.construct", - "Symbol.toStringTag", - "Intl.DateTimeFormat-formatRange", - "Symbol.split", - "DataView.prototype.getFloat32", - "globalThis", - "DataView.prototype.getUint32", - "Symbol.prototype.description", - "WeakRef", - "Int32Array", - "Uint8ClampedArray", - "String.fromCodePoint", - "SharedArrayBuffer", - "Intl.DateTimeFormat-fractionalSecondDigits", - "Uint8Array", - "rest-parameters", - "DataView.prototype.getInt8", - "Intl.DateTimeFormat-datetimestyle", - "DataView.prototype.setUint8", - "String.prototype.trimStart", - "caller", - "Uint16Array", - "Symbol.hasInstance", - "DataView.prototype.getInt16", - "string-trimming", - "optional-catch-binding", - "FinalizationGroup", - "Float32Array", - "Reflect.set", - "WeakSet", - "tail-call-optimization", - "String.prototype.matchAll", - "Symbol.unscopables", - "DataView.prototype.getInt32", - "Symbol.search", - "Intl.NumberFormat-unified", - "Symbol.species", - "Object.fromEntries", - "cross-realm", - "DataView.prototype.getFloat64", - "Symbol.isConcatSpreadable", - "String.prototype.trimEnd", - "Array.prototype.values", - "regexp-lookbehind", - "TypedArray", - "destructuring-assignment", - "Reflect.setPrototypeOf", - "regexp-dotall", - "u180e", - "Intl.RelativeTimeFormat", - "proxy-missing-checks", - "DataView.prototype.getUint16", - "Intl.ListFormat", - "Intl.DateTimeFormat-quarter", - "Reflect", - "Symbol.match", - 
"Intl.DateTimeFormat-dayPeriod", - "Object.is", - "Promise.allSettled", - "Symbol.replace", - "well-formed-json-stringify", - "Intl.Locale", - "ArrayBuffer", - "Set", - "Intl.Segmenter", - "Promise.prototype.finally", - "Int8Array", - "WeakMap", - "Array.prototype.flatMap", - "DataView", - "Atomics", - "Symbol.matchAll", - "String.prototype.includes", - "Map", - // "coalesce-expression", - // "regexp-named-groups", - // "Symbol.iterator", - // "object-rest", - // "class-fields-public", - // "class-static-methods-private", - // "Symbol.asyncIterator", - // "destructuring-binding", - // "BigInt", - // "arrow-function", - // "class-methods-private", - // "dynamic-import", - // "let", - // "Symbol", - // "import.meta", - // "class-static-fields-public", - // "class", - // "export-star-as-namespace-from-module", - // "Proxy", - // "top-level-await", - // "numeric-separator-literal", - // "object-spread", - // "default-parameters", - // "optional-chaining", - // "Symbol.toPrimitive", - // "async-iteration", - // "computed-property-names", - // "regexp-unicode-property-escapes", - // "class-fields-private", - // "generators", - // "async-functions", - // "new.target", - // "hashbang", - // "const", - // "json-superset", - // "class-static-fields-private", -]; - -type Res = Result>; - -struct Test262Runner<'a> { - desc: Description, - js: &'a str, -} - -#[derive(Debug, Clone)] -enum E262 { - General(String), - Success(String), -} -impl ::std::fmt::Display for E262 { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - match self { - E262::General(ref s) => write!(f, "General Error:\n{}", s), - E262::Success(_) => write!(f, "Unexpected Successful Parsing"), - } - } -} -impl Error for E262 {} -impl E262 { - pub fn new(s: &str) -> Self { - E262::General(s.to_string()) - } - pub fn from(e: impl Error) -> Self { - Self::General(format!("{}", e)) - } -} - -impl<'a> Test262Runner<'a> { - pub fn new(js: &'a str) -> Res { - let desc = Self::find_desc_comment(js)?; - Ok(Self { desc, js }) - } - fn find_desc_comment(js: &str) -> Result { - let start_idx = js - .find("/*---") - .ok_or_else(|| E262::new("Unable to find comment start"))?; - let ending = js - .get(start_idx + 5..) - .ok_or_else(|| E262::new("Invalid start index"))?; - let end_idx = ending - .find("---*/") - .ok_or_else(|| E262::new("Unable to find comment end"))?; - let trimmed = ending - .get(..end_idx) - .ok_or_else(|| E262::new("Invalid end index"))?; - let ret = if trimmed.contains('\r') { - serde_yaml::from_str(&trimmed.replace("\r\n", "\n").replace('\r', "\n")) - .map_err(E262::from)? - } else { - serde_yaml::from_str(trimmed).map_err(E262::from)? 
- }; - Ok(ret) - } - pub fn clone_desc(&self) -> Description { - self.desc.clone() - } - pub fn run_strict(&self) -> Result<(), E262> { - if !self - .desc - .flags - .iter() - .any(|f| f == &Flag::Module || f == &Flag::NoStrict) - { - self.run_script(&format!("'use strict'\n{}", self.js))?; - } - Ok(()) - } - - pub fn run(&self) -> Result<(), E262> { - if !self.desc.flags.iter().any(|f| f == &Flag::OnlyStrict) { - if self.desc.flags.iter().any(|f| f == &Flag::Module) { - self.run_mod(&self.js)?; - } else { - self.run_script(&self.js)?; - } - } - Ok(()) - } - - fn run_script(&self, js: &str) -> Result<(), E262> { - self.run_(false, js)?; - Ok(()) - } - - fn run_mod(&self, js: &str) -> Result<(), E262> { - self.run_(true, js)?; - Ok(()) - } - - fn run_(&self, module: bool, js: &str) -> Result<(), E262> { - let mut p = Parser::builder() - .module(module) - .js(js) - .build() - .map_err(|e| E262::new(&format!("Error constructing parser{:?}", e)))?; - match p.parse() { - Ok(program) => { - if let Some(n) = &self.desc.negative { - if &n.phase == &Phase::Parse { - Err(E262::Success(format!("```ron\n{:#?}\n```", program))) - } else { - Ok(()) - } - } else { - Ok(()) - } - } - Err(e) => { - if let Some(n) = &self.desc.negative { - if &n.phase == &Phase::Parse { - Ok(()) - } else { - Err(E262::from(e)) - } - } else { - Err(E262::from(e)) - } - } - } - } -} - -#[derive(Debug, Deserialize, Clone, Default, Serialize)] -struct Description { - id: Option, - esid: Option, - es5id: Option, - es6id: Option, - info: Option, - description: Option, - negative: Option, - #[serde(default)] - includes: Vec, - #[serde(default)] - flags: Vec, - #[serde(default)] - locale: Vec, - #[serde(default)] - features: Vec, -} - -#[derive(Debug, Deserialize, Clone, Serialize)] -struct Negative { - phase: Phase, - #[serde(alias = "type")] - kind: Option, -} - -#[derive(Debug, PartialEq, Deserialize, Clone, Copy, Serialize)] -#[serde(rename_all = "camelCase")] -enum Phase { - Parse, - Early, - Resolution, - Runtime, -} - -#[derive(Debug, Deserialize, PartialEq, Clone, Copy, Serialize)] -#[serde(rename_all = "camelCase")] -enum Flag { - OnlyStrict, - NoStrict, - Module, - Raw, - Async, - Generated, - #[serde(alias = "CanBlockIsFalse")] - CanBlockIsFalse, - #[serde(alias = "CanBlockIsTrue")] - CanBlockIsTrue, - #[serde(alias = "non-deterministic")] - NonDeterministic, -} - -#[derive(Debug, Serialize, Clone)] -struct TestFailure { - pub path: PathBuf, - pub strict: TestStatus, - pub not_strict: TestStatus, - pub runner: TestStatus, - pub desc: Description, - pub js: String, -} -#[derive(Debug, Serialize, PartialEq, Clone)] -enum TestStatus { - Success, - Failure(String), - NotRun, -} -impl TestStatus { - pub fn is_failure(&self) -> bool { - if let TestStatus::Failure(_) = self { - true - } else { - false - } - } -} -impl ::std::fmt::Display for TestStatus { - fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { - match self { - TestStatus::Failure(ref s) => write!(f, "{}", s), - TestStatus::NotRun => write!(f, "Not Run"), - TestStatus::Success => write!(f, "Passed"), - } - } -} -impl TestFailure { - pub fn is_failure(&self) -> bool { - self.strict.is_failure() || self.not_strict.is_failure() || self.runner.is_failure() - } - pub fn to_markdown(&self) -> String { - let flags: Vec = self.desc.flags.iter().map(|f| format!("{:?}", f)).collect(); - let features: Vec = self - .desc - .features - .iter() - .map(|f| format!("{:?}", f)) - .collect(); - let desc = if let Some(ref inner) = self.desc.description { - 
inner.to_string() - } else { - "__not provided__".to_string() - }; - let info = if let Some(ref inner) = self.desc.info { - inner.to_string() - } else { - "__not provided__".to_string() - }; - let runner = format!("{}", self.runner); - let strict = format!("{}", self.strict); - let not = format!("{}", self.not_strict); - let mut id = if let Some(ref id) = self.desc.id { - format!("{} (id) ", id) - } else { - String::new() - }; - if let Some(ref i) = self.desc.esid { - id.push_str(&format!("{} (esid) ", i)); - } - if let Some(ref i) = self.desc.es5id { - id.push_str(&format!("{} (es5id) ", i)); - } - if let Some(ref i) = self.desc.es6id { - id.push_str(&format!("{} (es6id) ", i)); - } - format!( - "# {id} -## Description -{desc} - -### flags -{flags} - -### features -{features} - -### Info -``` -{info} -``` - -## Results -### runner -{runner} - -### strict -{strict} - -### not_strict -{not} - -```js -{js} -``` -", - id = id, - desc = desc, - info = info, - flags = flags.join(", "), - features = features.join(", "), - runner = runner, - strict = strict, - not = not, - js = self.js, - ) - } - pub fn get_first_id(&self, fallback: &str) -> String { - if let Some(ref i) = self.desc.esid { - i.to_string() - } else if let Some(ref i) = self.desc.es5id { - i.to_string() - } else if let Some(ref i) = self.desc.es6id { - i.to_string() - } else if let Some(ref i) = self.desc.description { - i.replace(" ", "_") - } else { - fallback.to_string() - } - } - - pub fn as_list_item(&self, path: &impl AsRef) -> String { - let mut href = format!("{}", path.as_ref().display()); - if href.starts_with("\\\\?") { - href = format!("\\\\{}", &href[3..]); - } - let mut li_class = "failure-container".to_string(); - let neg = if let Some(Negative { - phase: Phase::Parse, - .. - }) = &self.desc.negative - { - li_class.push_str(" negative"); - true - } else { - li_class.push_str(" positive"); - false - }; - let mut html = format!( - r#"
  • -{} -
    "#, - li_class, - href, - self.path.display() - ); - if let TestStatus::Failure(_) = self.runner { - html.push_str(r#"!!!"#) - } - if !neg { - let mut error_text = r#"
    "#.to_string(); - if let TestStatus::Failure(ref msg) = self.strict { - error_text.push_str(&format!(r#"{}"#, msg)); - } - if let TestStatus::Failure(ref msg) = self.not_strict { - error_text.push_str(&format!(r#"{}"#, msg)); - } - error_text.push_str("
    "); - html.push_str(&error_text); - } - if let Some(ref d) = self.desc.description { - html.push_str(&format!(r#"
    {}
    "#, d)); - } - - if !self.desc.features.is_empty() { - html.push_str(r#"
    features
    "#) - } - for feat in &self.desc.features { - html.push_str(&format!( - r#"
    {:?}
    "#, - feat - )); - } - html.push_str("
  • "); - html - } -} -static SKIP_COUNT: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0); -#[test] -fn test262() -> Res<()> { - let pb = ProgressBar::new_spinner(); - let sty = ProgressStyle::default_bar() - .template("{bar:40.cyan/blue} {pos:>7}/{len:7} {msg}") - .progress_chars("█▓▒░ "); - let path = Path::new("./test262"); - if !path.exists() { - panic!("unable to run this test without the test262 test suite see CONTRIBUTING.md for more information"); - } - pb.set_style(sty.clone()); - let (ct, paths) = get_paths(&path); - pb.set_length(ct as u64); - let failures: Vec = paths - .par_iter() - .progress_with(pb) - .filter_map(test_mapper) - .collect(); - - let write_failures = if let Ok(env_var) = ::std::env::var("RESSA_WRITE_FAILURES") { - println!("RESSA_WRITE_FAILURES={}", env_var); - env_var != "0" - } else { - println!("RESSA_WRITE_FAILURES does not exist"); - false - }; - let len = failures.len(); - let total_run = ct - SKIP_COUNT.load(std::sync::atomic::Ordering::Relaxed) as usize; - let fail_rate = len as f32 / total_run as f32; - let report = format!( - "Failed {} of {} test262, fail rate of {:02.2}%", - len, - total_run, - fail_rate * 100.0 - ); - if write_failures { - let mut collected = std::collections::HashMap::new(); - let mut feature_count = std::collections::HashMap::new(); - for failure in failures { - for feat in &failure.desc.features { - *feature_count.entry(feat.clone()).or_insert(0) += 1; - } - let id = failure.get_first_id("unknown"); - let sames = collected.entry(id).or_insert(vec![]); - sames.push(failure.clone()); - } - println!("getting ready to write failures"); - let base_path = PathBuf::from("failures"); - let base_path = base_path.join("test262"); - if base_path.exists() { - let _ = ::std::fs::remove_dir_all(&base_path); - } - let keep_writing = base_path.exists() - || if let Ok(_) = ::std::fs::create_dir_all(&base_path) { - println!("created directory"); - true - } else { - println!("failed to create directory"); - false - }; - let base_path = ::std::fs::canonicalize(&base_path)?; - if keep_writing { - use std::io::Write; - let root_path = base_path.join("index.html"); - let mut root_file = ::std::io::BufWriter::new(::std::fs::File::create(&root_path)?); - let head = format!(" - - ressa test 262 failures - - - - - - ", include_str!("./style.css")); - root_file.write_all(head.as_bytes())?; - root_file.write_all(format!("

    Failures

    {}
      ", report).as_bytes())?; - - for (name, ct) in feature_count { - root_file.write_all(format!("
    • {}{}
    • ", name, ct).as_bytes())?; - } - root_file.write_all(b"
      ").unwrap(); - let mut sorted = collected - .into_iter() - .collect::)>>(); - sorted.sort_by(|(_, lhs), (_, rhs)| rhs.len().cmp(&lhs.len())); - for (id, list) in sorted { - root_file.write_all( - format!( - "
    • {} ({})

        ", - id, - list.len() - ) - .as_bytes(), - )?; - for (i, fail) in list.iter().enumerate() { - if let Some(file_name) = fail.path.file_stem() { - let file_name = format!("{}{}.html", file_name.to_str().unwrap(), i); - let new_path = base_path.join(&file_name); - root_file.write(fail.as_list_item(&file_name).as_bytes())?; - let md = fail.to_markdown(); - let parser = pulldown_cmark::Parser::new(&md); - let mut f = BufWriter::new(File::create(&new_path)?); - f.write_all(head.as_bytes())?; - pulldown_cmark::html::write_html(&mut f, parser)?; - f.write_all(br#""#)?; - f.write_all( - format!("", include_str!("./addLineNumbers.js")) - .as_bytes(), - )?; - f.write_all(b"")?; - } - } - root_file.write_all(b"
    • ")?; - } - root_file.write_all( - format!( - "
    ", - include_str!("./removeNegative.js") - ) - .as_bytes(), - )?; - let root_str = format!("{}", root_path.display()); - if root_str.starts_with("\\\\?") { - println!("file://{}", &root_str[3..].replace('\\', "/")) - } else { - println!("file:{}", root_str.replace('\\', "/")); - } - } - } - - if len > 0 { - panic!("{}", report); - } - Ok(()) -} - -fn get_paths(path: &Path) -> (usize, Vec) { - let ct = walkdir::WalkDir::new(path) - .into_iter() - .filter_map(filter_mapper) - .count(); - let wd = walkdir::WalkDir::new(path) - .into_iter() - .filter_map(filter_mapper) - .collect(); - (ct, wd) -} - -fn filter_mapper(e: Result) -> Option { - let entry = e.ok()?; - let path = entry.path(); - if path.is_dir() { - None - } else { - let ext = path.extension()?; - if ext == "js" { - let file_name = path.file_name()?; - let file_name = file_name.to_str()?; - if file_name.ends_with("_FIXTURE.js") { - None - } else { - Some(path.to_path_buf()) - } - } else { - None - } - } -} - -fn test_mapper(path: &PathBuf) -> Option { - let contents = if let Ok(contents) = ::std::fs::read_to_string(path) { - contents - } else { - return None; - }; - let handler = match Test262Runner::new(&contents) { - Ok(handler) => handler, - Err(e) => { - return Some(TestFailure { - desc: Description::default(), - path: path.clone(), - strict: TestStatus::NotRun, - not_strict: TestStatus::NotRun, - runner: TestStatus::Failure(format!("{}", e)), - js: contents, - }); - } - }; - let mut ret = TestFailure { - desc: handler.clone_desc(), - path: path.clone(), - strict: TestStatus::NotRun, - not_strict: TestStatus::NotRun, - runner: TestStatus::Success, - js: contents.clone(), - }; - if !ret - .desc - .features - .iter() - .all(|f| INCLUDED_FEATURES.iter().any(|f2| f == f2)) - { - SKIP_COUNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed); - return None; - } - if let Err(e) = handler.run_strict() { - let s = match e { - E262::General(ref general) => general.to_string(), - E262::Success(ref tree) => tree.to_string(), - }; - ret.strict = TestStatus::Failure(s); - } else { - ret.strict = TestStatus::Success; - } - if let Err(e) = handler.run() { - let s = match e { - E262::General(ref general) => general.to_string(), - E262::Success(ref tree) => tree.to_string(), - }; - ret.not_strict = TestStatus::Failure(s); - } else { - ret.not_strict = TestStatus::Success; - } - if ret.is_failure() { - Some(ret) - } else { - None - } -} - -#[test] -fn yam() { - let yaml = "es6id: asdf -flags: [onlyStrict] -info: | - data - data - data"; - let res: serde_yaml::Mapping = serde_yaml::from_str(yaml).expect("failed to parse yaml"); - eprintln!("{:?}", res); - let yaml2 = "[onlyStrict]"; - let res2: Vec = serde_yaml::from_str(yaml2).expect("failed to parse yaml2"); - eprintln!("{:?}", res2); -} - -#[test] -fn yeild_in_strict_mode() { - let _ = env_logger::try_init(); - let js = "'use strict' -var yield = 1;"; - let mut p = Parser::builder() - .js(js) - .build() - .expect("faile to create parser"); - match p.parse() { - Err(e) => println!("{}", e), - _ => panic!("Unexpected successful parse of yield as identifier"), - } - let js = "'use strict' -var \\u0079ield = 123;"; - let mut p = Parser::builder() - .js(js) - .build() - .expect("faile to create parser"); - match p.parse() { - Err(e) => println!("{}", e), - _ => panic!("Unexpected successful parse of escaped yield as identifier"), - } -} - -#[cfg(feature = "test_262_parser")] -mod parser { - - #[test] - fn test_262_parser() { - let path = Path::new("./test262-parser"); - if !path.exists() { - 
panic!("Unable to run this test without the test262-parser test suite, see CONTRIBUTING.md for more information"); - } - let (total, paths) = get_paths(&path); - let (early, fail, pass, pass_explicit) = categorize_paths(&paths); - let earlies = run_category(&early, true); - let fails = run_category(&fail, true); - let passes = run_category(&pass, false); - let explicits = run_category(&pass_explicit, false); - if !report_errors(total, &earlies, &fails, &passes, &explicits) { - panic!("Error in 262 parser tests"); - } - } - fn report_errors( - total: usize, - earlies: &[String], - fails: &[String], - passes: &[String], - explicits: &[String], - ) -> bool { - if earlies.is_empty() || fails.is_empty() || passes.is_empty() || explicits.is_empty() { - eprintln!("passed 100% of {} test", total); - return true; - } - let mut fail_ct = report_cat_errors("earlies", earlies); - fail_ct += report_cat_errors("fails", fails); - fail_ct += report_cat_errors("passes", passes); - fail_ct += report_cat_errors("explicits", explicits); - eprintln!( - "failed {} of {} ({:02.2}%)", - fail_ct, - total, - (fail_ct as f32 / total as f32) * 100f32 - ); - return false; - } - - fn report_cat_errors(name: &str, cat: &[String]) -> usize { - if cat.is_empty() { - return 0; - } - eprintln!("----------\n{}\n----------\n", name); - for (i, msg) in cat.iter().enumerate() { - eprintln!("{}: {}", i, msg); - } - eprintln!("\n----------\n"); - cat.len() - } - - fn run_category(paths: &[PathBuf], should_fail: bool) -> Vec { - let mut ret = Vec::new(); - for path in paths.iter() { - let js = std::fs::read_to_string(&path) - .expect(&format!("failed to read {}", path.display())); - let is_mod = format!("{}", path.display()).contains("module"); - let mut p = match Parser::builder().js(&js).module(is_mod).build() { - Ok(p) => p, - Err(e) => { - if should_fail { - ret.push(format!("{}\n\n{}", path.display(), e)); - } - continue; - } - }; - match p.parse() { - Ok(p) => { - if should_fail { - ret.push(format!("{}\n{:#?}", path.display(), p)); - } - } - Err(e) => { - if !should_fail { - ret.push(format!("{}\n{}", path.display(), e)) - } - } - } - } - ret - } - - fn categorize_paths( - paths: &[PathBuf], - ) -> (Vec, Vec, Vec, Vec) { - paths - .into_iter() - .fold((vec![], vec![], vec![], vec![]), |mut acc, path| { - if let Some(parent) = path.parent() { - let parent_str = format!("{}", parent.display()); - if parent_str.ends_with("early") { - acc.0.push(path.to_path_buf()); - } else if parent_str.ends_with("fail") { - acc.1.push(path.to_path_buf()); - } else if parent_str.ends_with("pass") { - acc.2.push(path.to_path_buf()); - } else if parent_str.ends_with("pass-explicit") { - acc.3.push(path.to_path_buf()); - } - } - acc - }) - } -} diff --git a/tests/test262/removeNegative.js b/tests/test262/removeNegative.js deleted file mode 100644 index d5e4828..0000000 --- a/tests/test262/removeNegative.js +++ /dev/null @@ -1,26 +0,0 @@ -(function() { - let button = document.getElementById('remove-neg-button'); - if (!button) { - return console.error('unable to find button'); - } - let hidden = false; - button.addEventListener('click', () => { - let negs = document.querySelectorAll('.negative'); - for (var i = 0; i < negs.length; i++) { - negs[i].style.display = 'none'; - } - let lis = document.querySelectorAll('.single-error-list'); - for (var i = 0; i < lis.length; i++) { - let item = lis[i]; - let posCount = item.querySelectorAll('.positive').length; - if(posCount === 0) { - item.style.display = 'none'; - } else { - console.log('positive not 
0', posCount); - } - } - let positiveCount = document.querySelectorAll('.positive').length; - let report = document.querySelector('quote'); - report.innerText += ' positive: ' + positiveCount; - }); -})(); \ No newline at end of file diff --git a/tests/test262/style.css b/tests/test262/style.css deleted file mode 100644 index d55c138..0000000 --- a/tests/test262/style.css +++ /dev/null @@ -1,54 +0,0 @@ -* { - font-family: sans-serif; -} - -body { - max-width: 800px; - margin: auto; -} - -quote { - font-weight: bold; - color: ghostwhite; - background: grey; - padding: 5px 10px; - margin-bottom: 15px; -} - -ul { - display: flex; - flex-flow: column; -} - -li>a { - font-weight: bold; -} - -.failure-container>.additional-info { - margin-bottom: 10px; - border: 1px solid black; - max-width: 800px; -} - -.failure-container.negative>.additional-info { - border-color: red; -} - -.error-text { - font-size: 12pt; - display: flex; - flex-flow: column; -} -.error-text > span { - margin-left: 5px; -} - -.code-gutter { - background: black; - font-family: sans-serif; - color: white; - position: absolute; - display: flex; - flex-flow: column; - padding-top: .5em; -} \ No newline at end of file From 523f5841a28349b5fd81d1ffcdf640e4f119ae2a Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 11:43:00 -0600 Subject: [PATCH 06/22] chore: clean up features and optional dependencies --- Cargo.toml | 21 ++++++++------------- 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 680ece1..98352fe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,24 +15,19 @@ backtrace = "0.3" ress = "0.11" resast = "0.5" log = "0.4" -walkdir = { version = "2", optional = true } -indicatif = { version = "0.12", optional = true, features = ["with_rayon"] } -rayon = { version = "1", optional = true } -pulldown-cmark = { version = "0.7", optional = true } res-regex = "0.1" hash-chain = "0.3" tracing = "0.1" [features] default = [] -major_libs = [] +# This feature populates a field on the Parser `_look_ahead` that will contain a debug format +# string of the look_ahead token. 
Very helpful when debugging this crate with gdb/lldb as sometimes +# the property shape of the `Token` isn't formatted well debug_look_ahead = [] -error_backtrace = [] -serialization = ["resast/serialization"] -esprima = ["resast/esprima"] -moz_central = ["walkdir"] -test_262 = ["indicatif", "rayon", "pulldown-cmark"] -test_262_parser = ["indicatif", "rayon", "pulldown-cmark"] +# This feature disables the moz_central tests by default as they tend to run long on most +# development machines and require a larger minimum stack size to pass +moz_central = [] [dev-dependencies] criterion = "0.3" @@ -54,8 +49,8 @@ harness = false [[example]] name = "js-to-json" path = "examples/js_to_json.rs" -required-features = ["serialization"] +required-features = ["resast/serialization"] [[example]] name = "js-to-json-esprima" path = "examples/js_to_json-esprima.rs" -required-features = ["esprima"] +required-features = ["resast/esprima"] From 82c153b7221e0aedee6a70053bb73908d4612dec Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 11:45:11 -0600 Subject: [PATCH 07/22] chore: remove backtrace as a dependency The functionality previously provided here is now more ergonomically available via `tracing` --- Cargo.toml | 1 - src/lib.rs | 1 - src/spanned/mod.rs | 24 ------------------------ 3 files changed, 26 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 98352fe..49130b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,7 +11,6 @@ categories = ["parsing", "text-processing", "web-programming"] edition = "2018" [dependencies] -backtrace = "0.3" ress = "0.11" resast = "0.5" log = "0.4" diff --git a/src/lib.rs b/src/lib.rs index cb7ecbe..a494404 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -58,7 +58,6 @@ extern crate ress; #[macro_use] extern crate log; -extern crate backtrace; use ress::prelude::*; pub use ress::Span; diff --git a/src/spanned/mod.rs b/src/spanned/mod.rs index ec2a4d4..9ae4779 100644 --- a/src/spanned/mod.rs +++ b/src/spanned/mod.rs @@ -6747,10 +6747,6 @@ where } fn expected_token_error(&self, item: &Item<&'b str>, expectation: &[&str]) -> Res { - if cfg!(feature = "error_backtrace") { - let bt = backtrace::Backtrace::new(); - error!("{:?}", bt); - } let pos = item.location.start; let expectation = expectation .iter() @@ -6770,10 +6766,6 @@ where )) } fn unexpected_token_error(&self, item: &Item<&'b str>, msg: &str) -> Res { - if cfg!(feature = "error_backtrace") { - let bt = backtrace::Backtrace::new(); - error!("{:?}", bt); - } let pos = item.location.start; let name = self.scanner.string_for(&item.span).unwrap_or_default(); @@ -6784,34 +6776,18 @@ where } fn tolerate_error(&self, err: Error) -> Result<(), Error> { if !self.config.tolerant { - if cfg!(feature = "error_backtrace") { - let bt = backtrace::Backtrace::new(); - error!("{:?}", bt); - } Err(err) } else { Ok(()) } } fn op_error(&self, msg: &str) -> Error { - if cfg!(feature = "error_backtrace") { - let bt = backtrace::Backtrace::new(); - error!("{:?}", bt); - } Error::OperationError(self.current_position, msg.to_owned()) } fn redecl_error(&self, name: &str) -> Error { - if cfg!(feature = "error_backtrace") { - let bt = backtrace::Backtrace::new(); - error!("{:?}", bt); - } Error::Redecl(self.current_position, name.to_owned()) } fn reinterpret_error(&self, from: &str, to: &str) -> Error { - if cfg!(feature = "error_backtrace") { - let bt = backtrace::Backtrace::new(); - error!("{:?}", bt); - } Error::UnableToReinterpret(self.current_position, from.to_owned(), to.to_owned()) } From 
630db824e50798278f93508f62face341a70aa73 Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 11:50:50 -0600 Subject: [PATCH 08/22] chore: remove feature gates from json examples --- Cargo.toml | 3 +-- examples/js_to_json-esprima.rs | 10 ++-------- examples/js_to_json.rs | 9 ++------- 3 files changed, 5 insertions(+), 17 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 49130b8..e2b0c33 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -37,8 +37,7 @@ lazy_static = "1" serde = { version = "1", features = ["derive"] } serde_json = "1" serde_yaml = "0.9" -term = "0.6" -term-painter = "0.2.4" +term-painter = "0.3" walkdir = "2" [[bench]] diff --git a/examples/js_to_json-esprima.rs b/examples/js_to_json-esprima.rs index 2caffe1..c9cb5fc 100644 --- a/examples/js_to_json-esprima.rs +++ b/examples/js_to_json-esprima.rs @@ -1,5 +1,5 @@ use docopt::Docopt; -use serde::{Deserialize, Serialize}; +use serde::Deserialize; use std::{ error::Error, ffi::OsStr, @@ -29,12 +29,6 @@ struct Args { flag_out: Option, } -#[cfg(not(feature = "esprima"))] -fn main() { - println!("Please run again with --features esprima"); -} - -#[cfg(feature = "esprima")] fn main() -> Result<(), Box> { let args: Args = Docopt::new(USAGE) .and_then(|o| o.deserialize()) @@ -54,7 +48,7 @@ fn main() -> Result<(), Box> { } Ok(()) } -#[cfg(feature = "esprima")] + fn gen_json(from: PathBuf, pretty: bool) -> Result> { let js = read_to_string(&from)?; let mut p = Parser::builder() diff --git a/examples/js_to_json.rs b/examples/js_to_json.rs index 32d5ca0..c9cb5fc 100644 --- a/examples/js_to_json.rs +++ b/examples/js_to_json.rs @@ -1,4 +1,5 @@ use docopt::Docopt; +use serde::Deserialize; use std::{ error::Error, ffi::OsStr, @@ -28,12 +29,6 @@ struct Args { flag_out: Option, } -#[cfg(not(feature = "serialization"))] -fn main() { - println!("Please run again with --features serialization"); -} - -#[cfg(feature = "serialization")] fn main() -> Result<(), Box> { let args: Args = Docopt::new(USAGE) .and_then(|o| o.deserialize()) @@ -53,7 +48,7 @@ fn main() -> Result<(), Box> { } Ok(()) } -#[cfg(feature = "serialization")] + fn gen_json(from: PathBuf, pretty: bool) -> Result> { let js = read_to_string(&from)?; let mut p = Parser::builder() From 04e87b9e1b4a6176493697ba9e731cd17c8a2936 Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 11:55:22 -0600 Subject: [PATCH 09/22] chore bump dev-dependencies versions --- Cargo.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index e2b0c33..1fd1520 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -29,9 +29,9 @@ debug_look_ahead = [] moz_central = [] [dev-dependencies] -criterion = "0.3" +criterion = "0.4" docopt = "1" -env_logger = "0.6" +env_logger = "0.9" insta = "1.19" lazy_static = "1" serde = { version = "1", features = ["derive"] } From abf69b4b583a4b35889706db235909a621e89aa8 Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 11:56:13 -0600 Subject: [PATCH 10/22] chore: alphabetize dependencies --- Cargo.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 1fd1520..ca90a89 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,11 +11,11 @@ categories = ["parsing", "text-processing", "web-programming"] edition = "2018" [dependencies] +hash-chain = "0.3" +log = "0.4" ress = "0.11" resast = "0.5" -log = "0.4" res-regex = "0.1" -hash-chain = "0.3" tracing = "0.1" [features] @@ -48,6 +48,7 @@ harness = false name = "js-to-json" path = "examples/js_to_json.rs" 
required-features = ["resast/serialization"] + [[example]] name = "js-to-json-esprima" path = "examples/js_to_json-esprima.rs" From 4965c8dcc7c4b9fca1384e08868e78cedd36359b Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:02:29 -0600 Subject: [PATCH 11/22] chore: remove extern crate usage --- src/formal_params.rs | 10 +- src/lexical_names.rs | 14 +- src/lhs.rs | 16 +- src/lib.rs | 7 +- src/spanned/mod.rs | 559 +++++++++++++++++++++++++------------------ tests/all/ecma262.rs | 2 +- 6 files changed, 353 insertions(+), 255 deletions(-) diff --git a/src/formal_params.rs b/src/formal_params.rs index fd8b99d..3973ff7 100644 --- a/src/formal_params.rs +++ b/src/formal_params.rs @@ -24,7 +24,7 @@ pub struct FormalsList<'a> { pub fn have_duplicates<'a>(params: &[Param<'a>]) -> bool { if let Err(first_dupe) = find_duplicate(params) { - error!("Found duplicate parameter: {}", first_dupe); + log::error!("Found duplicate parameter: {}", first_dupe); true } else { false @@ -52,7 +52,7 @@ pub fn update_with_expr<'a>( set: &mut HashSet>, ) -> Result<(), Cow<'a, str>> { use resast::spanned::expr::{AssignExpr, AssignLeft}; - trace!("update_with_expr {:?} {:?}", expr, set); + log::trace!("update_with_expr {:?} {:?}", expr, set); match expr { resast::spanned::expr::Expr::Ident(id) => { if !set.insert(id.slice.source.clone()) { @@ -87,7 +87,7 @@ pub fn update_with_pat<'a>( pat: &resast::spanned::pat::Pat<'a>, set: &mut HashSet>, ) -> Result<(), Cow<'a, str>> { - trace!("update_with_pat {:?} {:?}", pat, set); + log::trace!("update_with_pat {:?} {:?}", pat, set); match pat { Pat::Ident(id) => { if !set.insert(id.slice.source.clone()) { @@ -151,7 +151,7 @@ fn update_with_prop_value<'a>( prop: &PropValue<'a>, set: &mut HashSet>, ) -> Result<(), Cow<'a, str>> { - trace!("update_with_prop {:?}, {:?}", prop, set); + log::trace!("update_with_prop {:?}, {:?}", prop, set); match &prop { PropValue::Expr(expr) => { update_with_expr(expr, set)?; @@ -176,7 +176,7 @@ fn update_with_prop_key<'a>( } fn update_with_lit<'a>(lit: &Lit<'a>, set: &mut HashSet>) -> Result<(), Cow<'a, str>> { - trace!("update_with_lit {:?}, {:?}", lit, set); + log::trace!("update_with_lit {:?}, {:?}", lit, set); if let Lit::String(s) = lit { if !set.insert(s.content.source.clone()) { return Err(s.content.source.clone()); diff --git a/src/lexical_names.rs b/src/lexical_names.rs index 6f9bd4c..87ce162 100644 --- a/src/lexical_names.rs +++ b/src/lexical_names.rs @@ -106,7 +106,7 @@ impl<'a> DuplicateNameDetector<'a> { } DeclKind::Var(is_module) => { for (idx, scope) in self.states.iter().enumerate().rev() { - trace!("checking scope {}", idx); + log::trace!("checking scope {}", idx); let error = if self.lex.has_at(idx, &i) && !scope.is_simple_catch() { if let Some(Some(lex)) = self.first_lexes.get(idx) { &i != lex @@ -114,7 +114,7 @@ impl<'a> DuplicateNameDetector<'a> { true } } else { - trace!( + log::trace!( "looking for dupe in {} funcs_as_var: {}, funcs_has {}", idx, scope.funcs_as_var(is_module), @@ -144,15 +144,15 @@ impl<'a> DuplicateNameDetector<'a> { } DeclKind::Func(is_module) => { let state = if let Some(state) = self.states.last() { - trace!("last state found {:?}", state); + log::trace!("last state found {:?}", state); *state } else { Scope::default() }; self.check_lex(i.clone(), pos)?; - trace!("not in lexical decls"); + log::trace!("not in lexical decls"); if !state.funcs_as_var(is_module) { - trace!("state does not indicate functions should be treated as vars"); + log::trace!("state does not indicate functions should 
be treated as vars"); self.check_var(i.clone(), pos)?; } self.add_func(i, pos) @@ -327,7 +327,7 @@ impl<'a> DuplicateNameDetector<'a> { } pub fn undefined_module_export_guard(&mut self, id: Cow<'a, str>) { - trace!("add_module_export: {}", id); + log::trace!("add_module_export: {}", id); if !self.var.has_at(0, &id) && !self.lex.has_at(0, &id) { self.undefined_module_exports.insert(id); } @@ -348,7 +348,7 @@ impl<'a> DuplicateNameDetector<'a> { /// check the last tier in the chain map for an identifier fn check<'a>(map: &mut LexMap<'a>, i: Cow<'a, str>, pos: Position) -> Res<()> { log::trace!("check {:?} {:?} {:?}", map, i, pos); - trace!("checking for {}", i); + log::trace!("checking for {}", i); if map.last_has(&i) { if let Some(old_pos) = map.get(&i) { if *old_pos < pos { diff --git a/src/lhs.rs b/src/lhs.rs index 3327abb..1c5db6b 100644 --- a/src/lhs.rs +++ b/src/lhs.rs @@ -13,7 +13,7 @@ use std::{borrow::Cow, collections::HashSet}; type Res = Result<(), Error>; pub fn is_simple_expr<'a>(expr: &Expr<'a>) -> bool { - trace!("is_simple_expr {:?}", expr); + log::trace!("is_simple_expr {:?}", expr); match expr { Expr::This(_) => false, _ => true, @@ -21,7 +21,7 @@ pub fn is_simple_expr<'a>(expr: &Expr<'a>) -> bool { } pub fn is_simple_pat<'a>(pat: &Pat<'a>) -> bool { - trace!("is_simple_pat {:?}", pat); + log::trace!("is_simple_pat {:?}", pat); match pat { Pat::Ident(ref id) => match &*id.slice.source { "this" => false, @@ -179,7 +179,7 @@ pub fn check_loop_left<'a>(left: &LoopLeft<'a>, pos: Position) -> Res { } pub fn check_loop_head_expr<'a>(left: &Expr<'a>, pos: Position) -> Res { - debug!("check_loop_head_expr"); + log::debug!("check_loop_head_expr"); let mut set = HashSet::new(); match left { Expr::Array(ref a) => check_binding_array(&a.elements, pos, &mut set), @@ -194,7 +194,7 @@ fn check_binding_obj<'a>( pos: Position, set: &mut HashSet>, ) -> Res { - debug!("check_binding_obj"); + log::debug!("check_binding_obj"); for part in obj { if let ObjProp::Prop(prop) = &part.item { match prop { @@ -213,7 +213,7 @@ pub fn check_binding_array<'a>( pos: Position, set: &mut HashSet>, ) -> Res { - debug!("check_binding_array"); + log::debug!("check_binding_array"); for part in a { if let Some(part) = &part.item { if let Expr::Sequence(_) = part { @@ -230,7 +230,7 @@ fn check_loop_left_prop_key<'a>( pos: Position, set: &mut HashSet>, ) -> Res { - debug!("check_loop_left_prop_key"); + log::debug!("check_loop_left_prop_key"); match prop { PropKey::Expr(expr) => check_loop_left_expr(expr, pos, set), PropKey::Pat(pat) => check_loop_left_pat(pat, pos, set), @@ -243,7 +243,7 @@ fn check_loop_left_expr<'a>( pos: Position, set: &mut HashSet>, ) -> Res { - debug!("check_loop_left_expr"); + log::debug!("check_loop_left_expr"); match expr { Expr::Ident(ident) => { if !set.insert(ident.slice.source.clone()) { @@ -257,7 +257,7 @@ fn check_loop_left_expr<'a>( } fn check_loop_left_pat<'a>(pat: &Pat<'a>, pos: Position, set: &mut HashSet>) -> Res { - debug!("check_loop_left_pat"); + log::debug!("check_loop_left_pat"); match pat { Pat::Ident(ident) => { if !set.insert(ident.slice.source.clone()) { diff --git a/src/lib.rs b/src/lib.rs index a494404..cf62a84 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -55,9 +55,6 @@ //! checkout the `examples` folders for slightly larger //! examples. //! 
-extern crate ress; -#[macro_use] -extern crate log; use ress::prelude::*; pub use ress::Span; @@ -153,14 +150,14 @@ struct Context<'a> { impl Default for Config { fn default() -> Self { - trace!("default config"); + log::trace!("default config"); Self { tolerant: false } } } impl<'a> Default for Context<'a> { fn default() -> Self { - trace!("default context",); + log::trace!("default context",); Self { is_module: false, allow_await: true, diff --git a/src/spanned/mod.rs b/src/spanned/mod.rs index 9ae4779..6d2fd0d 100644 --- a/src/spanned/mod.rs +++ b/src/spanned/mod.rs @@ -351,9 +351,10 @@ where /// ``` #[tracing::instrument(level = "trace", skip(self))] pub fn parse(&mut self) -> Res { - debug!( + log::debug!( "{}: parse_script {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.context.is_module { self.context.strict = true; @@ -369,9 +370,10 @@ where /// Parse all of the directives into a single prologue #[tracing::instrument(level = "trace", skip(self))] fn parse_directive_prologues(&mut self) -> Res>> { - debug!( + log::debug!( "{}: parse_directive_prologues {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let mut ret = Vec::new(); loop { @@ -386,9 +388,10 @@ where /// Parse a single directive #[tracing::instrument(level = "trace", skip(self))] fn parse_directive(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_directive {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let orig = self.look_ahead.clone(); let expr = self.parse_expression()?; @@ -405,9 +408,10 @@ where self.context.found_directive_octal_escape = self.context.found_directive_octal_escape || oct; } - debug!( + log::debug!( "updated context.strict to {}, allowed?: {}", - self.context.strict, self.context.allow_strict_directive + self.context.strict, + self.context.allow_strict_directive ); if !self.context.allow_strict_directive && self.context.strict { return self.unexpected_token_error(&orig, "use strict in an invalid location"); @@ -439,7 +443,7 @@ where /// otherwise we move on to `Parser::parse_statement` #[tracing::instrument(level = "trace", skip(self))] fn parse_statement_list_item(&mut self, ctx: Option>) -> Res> { - debug!("{}: parse_statement_list_item", self.look_ahead.span.start); + log::debug!("{}: parse_statement_list_item", self.look_ahead.span.start); self.context.set_is_assignment_target(true); self.context.set_is_binding_element(true); let tok = self.look_ahead.token.clone(); @@ -706,10 +710,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_export_decl(&mut self) -> Res<(ModExport<'b>, Option>)> { - debug!("{} parse_export_decl", self.look_ahead_position); + log::debug!("{} parse_export_decl", self.look_ahead_position); let mut semi = None; if let Some(scope) = self.context.lexical_names.last_scope() { - trace!("scope: {:?}", self.context.lexical_names.states); + log::trace!("scope: {:?}", self.context.lexical_names.states); if !scope.is_top() { return Err(Error::InvalidExportError(self.current_position)); } @@ -968,9 +972,10 @@ where } #[tracing::instrument(level = "trace", skip(self))] fn parse_statement(&mut self, ctx: Option>) -> Res> { - debug!( + log::debug!( "{}: parse_statement {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let lh = self.look_ahead.token.clone(); let stmt = match lh { @@ -1110,9 
+1115,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_with_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_with_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.context.strict { self.tolerate_error(Error::NonStrictFeatureInStrictContext( @@ -1157,9 +1163,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_while_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_while_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::While(()))?; let open_paren = self.expect_punct(Punct::OpenParen)?; @@ -1194,9 +1201,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_var_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_var_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::Var(()))?; let decls = self.parse_var_decl_list(false)?; @@ -1213,9 +1221,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_var_decl_list(&mut self, in_for: bool) -> Res>>> { - debug!( + log::debug!( "{} parse_var_decl_list {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead_position; let first = self.parse_var_decl(in_for)?; @@ -1245,9 +1254,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_var_decl(&mut self, in_for: bool) -> Res> { - debug!( + log::debug!( "{} parse_variable_decl_list in_for: {}", - self.look_ahead.span.start, in_for + self.look_ahead.span.start, + in_for ); let (_, patt) = self.parse_pattern(true, &mut Vec::new())?; if self.context.strict && Self::is_restricted(&patt) { @@ -1276,9 +1286,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_try_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_try_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::Try(()))?; let block = self.parse_block(true)?; @@ -1321,9 +1332,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_catch_clause(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_catch_clause {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::Catch(()))?; let mut param_pos = self.look_ahead_position; @@ -1388,9 +1400,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_finally_clause(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_finally_clause {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::Finally(()))?; let body = self.parse_block(true)?; @@ -1399,9 +1412,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_throw_stmt(&mut self) -> Res<(Slice<'b>, Expr<'b>, Option>)> { - debug!( + log::debug!( "{}: parse_throw_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::Throw(()))?; if self.context.has_line_term || self.at_punct(Punct::SemiColon) { @@ -1414,9 +1428,10 @@ where 
#[tracing::instrument(level = "trace", skip(self))] fn parse_switch_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_switch_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::Switch(()))?; let open_paren = self.expect_punct(Punct::OpenParen)?; @@ -1457,9 +1472,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_switch_case(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_switch_case {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let (keyword, test) = if self.at_keyword(Keyword::Default(())) { (self.expect_keyword(Keyword::Default(()))?, None) @@ -1490,9 +1506,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_return_stmt(&mut self) -> Res<(Slice<'b>, Option>, Option>)> { - debug!( + log::debug!( "{}: parse_return_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if !self.context.in_function_body { return self @@ -1509,16 +1526,17 @@ where } else { None }; - debug!("return statement: {:?} {}", ret, self.context.allow_yield); + log::debug!("return statement: {:?} {}", ret, self.context.allow_yield); let semi_colon = self.consume_semicolon()?; Ok((keyword, ret, semi_colon)) } #[tracing::instrument(level = "trace", skip(self))] fn parse_if_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_if_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::If(()))?; @@ -1565,9 +1583,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_if_clause(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_if_clause {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.context.strict && self.at_keyword(Keyword::Function(())) && !self.config.tolerant { return self.unexpected_token_error(&self.look_ahead, ""); @@ -1576,9 +1595,11 @@ where } fn parse_fn_stmt(&mut self, decl_pos: bool) -> Res> { - debug!( + log::debug!( "{}: parse_fn_stmt {:?} {}", - self.look_ahead.span.start, self.look_ahead.token, decl_pos, + self.look_ahead.span.start, + self.look_ahead.token, + decl_pos, ); let async_keyword = if self.at_contextual_keyword("async") { let keyword = self.next_item()?; @@ -1593,9 +1614,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_for_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_for_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword_for = self.expect_keyword(Keyword::For(()))?; let is_await = if self.at_keyword(Keyword::Await(())) { @@ -1715,7 +1737,7 @@ where let prev_in = self.context.allow_in; self.context.allow_in = false; let mut decls = self.parse_binding_list(&var_kind, true)?; - debug!("{:?}", decls); + log::debug!("{:?}", decls); self.context.allow_in = prev_in; if decls.len() == 1 { let decl = if let Some(d) = decls.pop() { @@ -1865,9 +1887,10 @@ where for_keyword: Slice<'b>, open_paren: Slice<'b>, ) -> Res> { - debug!( + log::debug!( "{}: parse_for_loop {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let init = if self.at_punct(Punct::SemiColon) { None @@ -1884,9 +1907,10 @@ where keyword_for: 
Slice<'b>, open_paren: Slice<'b>, ) -> Res> { - debug!( + log::debug!( "{}: parse_for_loop_cont {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let semi1 = self.expect_punct(Punct::SemiColon)?; let test = if self.at_punct(Punct::SemiColon) { @@ -1925,9 +1949,10 @@ where keyword_for: Slice<'b>, open_paren: Slice<'b>, ) -> Res> { - debug!( + log::debug!( "{}: parse_for_in_loop {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if let LoopLeft::Variable( ref kind, @@ -1986,9 +2011,10 @@ where keyword_for: Slice<'b>, open_paren: Slice<'b>, ) -> Res> { - debug!( + log::debug!( "{}: parse_for_of_loop {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if let LoopLeft::Variable(_, VarDecl { init: Some(_), .. }) = left { return Err(Error::ForOfInAssign( @@ -2024,9 +2050,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_loop_body(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_loop_body {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let prev_iter = self.context.in_iteration; self.context.in_iteration = true; @@ -2037,9 +2064,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_do_while_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_do_while_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start_pos = self.look_ahead_position; let keyword_do = self.expect_keyword(Keyword::Do(()))?; @@ -2072,18 +2100,20 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_break_stmt(&mut self) -> Res<(Slice<'b>, Option>, Option>)> { - debug!( + log::debug!( "{}: parse_break_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); self.parse_optionally_labeled_statement(Keyword::Break(())) } #[tracing::instrument(level = "trace", skip(self))] fn parse_continue_stmt(&mut self) -> Res<(Slice<'b>, Option>, Option>)> { - debug!( + log::debug!( "{}: parse_continue_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); self.parse_optionally_labeled_statement(Keyword::Continue(())) } @@ -2092,7 +2122,7 @@ where &mut self, k: Keyword<()>, ) -> Res<(Slice<'b>, Option>, Option>)> { - debug!( + log::debug!( "{}: parse_optionally_labeled_statement", self.look_ahead.span.start ); @@ -2131,9 +2161,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_debugger_stmt(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_debugger_stmt {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::Debugger(()))?; let semi_colon = self.consume_semicolon()?; @@ -2145,7 +2176,7 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_labelled_statement(&mut self) -> Res> { - debug!("parse_labelled_statement, {:?}", self.look_ahead.token); + log::debug!("parse_labelled_statement, {:?}", self.look_ahead.token); let start = self.look_ahead.span; let pos = self.look_ahead_position; let expr = self.parse_expression()?; @@ -2221,9 +2252,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_expression_statement(&mut self) -> Res<(Expr<'b>, Option>)> { - 
debug!( + log::debug!( "{}: parse_expression_statement {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); self.expr_stmt_guard()?; let ret = self.parse_expression()?; @@ -2275,9 +2307,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_expression(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_expression {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let ret = isolate_cover_grammar!(self, parse_assignment_expr)?; if self.at_punct(Punct::Comma) { @@ -2299,9 +2332,10 @@ where } fn parse_block(&mut self, new_scope: bool) -> Res> { - debug!( + log::debug!( "{}: parse_block {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_brace = self.expect_punct(Punct::OpenBrace)?; if new_scope { @@ -2338,9 +2372,10 @@ where } fn parse_lexical_decl(&mut self, in_for: bool) -> Res> { - debug!( + log::debug!( "{}: parse_lexical_decl {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let next = self.next_item()?; let kind = match next.token { @@ -2365,9 +2400,10 @@ where kind: &VarKind, in_for: bool, ) -> Res>>> { - debug!( + log::debug!( "{}: parse_binding_list {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let k = if matches!(kind, VarKind::Var(_)) { lexical_names::DeclKind::Var(self.context.is_module) @@ -2394,9 +2430,10 @@ where } fn parse_variable_decl_list(&mut self, in_for: bool) -> Res>>> { - debug!( + log::debug!( "{} parse_variable_decl_list in_for: {}", - self.look_ahead.span.start, in_for + self.look_ahead.span.start, + in_for ); let first = self.parse_var_decl(in_for)?; let mut ret = vec![ListEntry::no_comma(first)]; @@ -2419,9 +2456,10 @@ where } fn parse_lexical_binding(&mut self, kind: &VarKind, in_for: bool) -> Res> { - debug!( + log::debug!( "{}: parse_lexical_binding {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead.clone(); let (_, id) = self.parse_pattern(matches!(kind, VarKind::Var(_)), &mut Vec::new())?; @@ -2461,9 +2499,10 @@ where } fn parse_function_decl(&mut self, opt_ident: bool) -> Res> { - debug!( + log::debug!( "{}: parse_function_decl {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start_pos = self.look_ahead_position; let keyword_async = if self.at_contextual_keyword("async") { @@ -2505,7 +2544,7 @@ where let prev_super = self.context.allow_super; self.context.allow_await = keyword_async.is_none(); self.context.allow_yield = star.is_none(); - debug!("setting allow_super to {}", false); + log::debug!("setting allow_super to {}", false); self.context.set_allow_super(false); let param_start = self.look_ahead_position; self.add_scope(lexical_names::Scope::FuncTop); @@ -2550,7 +2589,7 @@ where self.context.allow_strict_directive = prev_allow_strict; self.context.allow_await = prev_await; self.context.allow_yield = prev_yield; - debug!("setting allow_super to {}", prev_super); + log::debug!("setting allow_super to {}", prev_super); self.context.set_allow_super(prev_super); self.remove_scope(); Ok(Func { @@ -2573,9 +2612,12 @@ where is_hanging: bool, keyword_async: Option>, ) -> Res> { - debug!( + log::debug!( "{} parse_func( is_stmt: {}, opt_id: {}, 
is_hanging: {}", - self.look_ahead.span.start, is_stmt, opt_id, is_hanging + self.look_ahead.span.start, + is_stmt, + opt_id, + is_hanging ); let star = if self.at_punct(Punct::Asterisk) { let star = self.next_item()?; @@ -2596,13 +2638,13 @@ where } let id = self.parse_var_ident(false)?; if !is_hanging { - trace!( + log::trace!( "function not hanging, strict: {}, generator: {}, async: {}", self.context.strict, star.is_some(), keyword_async.is_some() ); - trace!( + log::trace!( "last scope: {:?}\n{:?}", self.context.lexical_names.last_scope(), self.context.lexical_names.states @@ -2641,9 +2683,10 @@ where let param_start = self.look_ahead_position; self.add_scope(lexical_names::Scope::FuncTop); let params = self.parse_func_params()?; - debug!( + log::debug!( "any params restricted? {}, {}", - params.found_restricted, params.strict + params.found_restricted, + params.strict ); let prev_strict = self.context.strict; let prev_oct = self.context.found_directive_octal_escape; @@ -2687,17 +2730,19 @@ where } #[tracing::instrument(level = "trace", skip(self))] fn remove_scope(&mut self) { - trace!("{} remove_scope", self.look_ahead.span.start); + log::trace!("{} remove_scope", self.look_ahead.span.start); self.context.lexical_names.remove_child(); } fn add_scope(&mut self, scope: lexical_names::Scope) { - trace!("{} add_scope {:?}", self.look_ahead.span.start, scope); + log::trace!("{} add_scope {:?}", self.look_ahead.span.start, scope); self.context.lexical_names.new_child(scope); } fn declare_pat(&mut self, pat: &Pat<'b>, kind: DeclKind, pos: Position) -> Res<()> { - info!( + log::info!( "{} declare_pat {:?} {:?}", - self.look_ahead.span.start, pat, pos + self.look_ahead.span.start, + pat, + pos ); self.context.lexical_names.declare_pat(pat, kind, pos) } @@ -2722,9 +2767,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_function_source_el(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_function_source_el {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_brace = self.expect_punct(Punct::OpenBrace)?; let mut body = self.parse_directive_prologues()?; @@ -2755,9 +2801,10 @@ where } fn parse_class_decl(&mut self, opt_ident: bool, check_id: bool) -> Res> { - debug!( + log::debug!( "{}: parse_class_decl {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let prev_strict = self.context.strict; let prev_oct = self.context.found_directive_octal_escape; @@ -2822,9 +2869,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_class_body(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_class_body {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let mut props = Vec::new(); let mut has_ctor = false; @@ -2850,9 +2898,10 @@ where /// and (false, Prop) otherwise #[tracing::instrument(level = "trace", skip(self))] fn parse_class_el(&mut self, has_ctor: bool) -> Res> { - debug!( + log::debug!( "{}: parse_class_el {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead_position; let keyword_static = if self.at_contextual_keyword("static") { @@ -2887,7 +2936,7 @@ where }; let star = if self.at_punct(Punct::Asterisk) { - debug!("found leading asterisk"); + log::debug!("found leading asterisk"); let star = self.next_item()?; self.slice_from(&star) } else { @@ -2967,9 
+3016,10 @@ where keyword_static: Option>, mut star: Option>, ) -> Res> { - debug!( + log::debug!( "{}: method_def {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let mut keyword_async = None; let id = if self.at_contextual_keyword("get") { @@ -3019,9 +3069,10 @@ where keyword_static: Option>, item_get: Item<&'b str>, ) -> Res> { - debug!( + log::debug!( "{}: get_method_def {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword_get = self.get_slice(&item_get)?; let id = self.parse_object_property_key()?; @@ -3044,9 +3095,10 @@ where keyword_static: Option>, item_set: Item<&'b str>, ) -> Res> { - debug!( + log::debug!( "{}: set_method_def {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword_set = self.get_slice(&item_set)?; let id = self.parse_object_property_key()?; @@ -3083,9 +3135,10 @@ where star: Option>, id: PropInitKey<'b>, ) -> Res> { - debug!( + log::debug!( "{}: method_def_cont {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead_position; let prev_yield = self.context.allow_yield; @@ -3129,9 +3182,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_class_ctor(&mut self, id: PropInitKey<'b>) -> Res> { - debug!( + log::debug!( "{}: parse_class_ctor {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let prev_allow_super_call = self.context.allow_super_call; self.context.allow_super_call = self.context.allow_super; @@ -3153,7 +3207,7 @@ where /// different shapes that `key` could be, including /// identifiers and literals fn is_key(key: &PropKey, other: &str) -> bool { - trace!("is_key: {:?} <-> {}", key, other); + log::trace!("is_key: {:?} <-> {}", key, other); match key { PropKey::Lit(ref l) => match l { Lit::String(ref s) => s.content.source == other, @@ -3176,7 +3230,7 @@ where keyword_async: Slice<'b>, id: PropInitKey<'b>, ) -> Res> { - debug!( + log::debug!( "{}: parse_property_method_async_fn", self.look_ahead.span.start ); @@ -3210,9 +3264,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_property_method(&mut self, id: PropInitKey<'b>) -> Res> { - debug!( + log::debug!( "{}: parse_property_method {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead_position; let prev_yield = self.context.allow_yield; @@ -3249,9 +3304,10 @@ where keyword_static: Option>, keyword_get: Slice<'b>, ) -> Res> { - debug!( + log::debug!( "{}: parse_getter_method {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead_position; let prev_yield = self.context.allow_yield; @@ -3282,9 +3338,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_method_body(&mut self, simple: bool, found_restricted: bool) -> Res> { - debug!( + log::debug!( "{}: parse_method_body {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); self.context.set_is_assignment_target(false); self.context.set_is_binding_element(false); @@ -3310,9 +3367,10 @@ where keyword_set: Slice<'b>, key: PropInitKey<'b>, ) -> Res> { - debug!( + log::debug!( "{}: parse_setter_method {:?}", 
- self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead_position; let prev_allow = self.context.allow_yield; @@ -3362,9 +3420,10 @@ where simple: bool, found_restricted: bool, ) -> Res> { - debug!( + log::debug!( "{}: parse_property_method_fn {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); self.context.set_is_assignment_target(false); self.context.set_is_binding_element(false); @@ -3388,7 +3447,7 @@ where } fn qualified_prop_name(tok: &Token<&str>) -> bool { - debug!("qualified_prop_name",); + log::debug!("qualified_prop_name",); tok.is_ident() || tok.is_keyword() || tok.is_literal() @@ -3397,9 +3456,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_object_property_key(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_object_property_key {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let item = self.next_item()?; if matches!(item.token, Token::String(_) | Token::Number(_)) { @@ -3527,9 +3587,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_primary_expression(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_primary_expression {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.context.strict && self.look_ahead.token.is_strict_reserved() { return Err(Error::NonStrictFeatureInStrictContext( @@ -3676,9 +3737,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_group_expr(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_group_expr {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_paren = self.expect_punct(Punct::OpenParen)?; if self.at_punct(Punct::CloseParen) { @@ -3898,9 +3960,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_array_init(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_array_init {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_bracket = self.expect_punct(Punct::OpenBracket)?; let mut elements = Vec::new(); @@ -3950,9 +4013,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_obj_init(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_obj_init {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start_pos = self.look_ahead_position; let open_brace = self.expect_punct(Punct::OpenBrace)?; @@ -3968,7 +4032,7 @@ where let (found_proto, prop) = self.parse_obj_prop()?; if found_proto { proto_ct += 1; - debug!("found proto: {}", proto_ct); + log::debug!("found proto: {}", proto_ct); } prop }; @@ -3997,9 +4061,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_obj_prop(&mut self) -> Res<(bool, ObjProp<'b>)> { - debug!( + log::debug!( "{}: parse_obj_prop {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead.clone(); let mut is_proto = false; @@ -4143,7 +4208,7 @@ where } fn is_proto_(key: &PropKey) -> bool { - trace!("is_proto {:?}", key); + log::trace!("is_proto {:?}", key); match key { PropKey::Lit(ref l) => match l { Lit::String(ref s) => s.content.source == "__proto__", @@ -4169,9 +4234,10 @@ where #[tracing::instrument(level = 
"trace", skip(self))] fn parse_template_lit(&mut self, is_tagged: bool) -> Res> { - debug!( + log::debug!( "{}: parse_template_Lit {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if !self.look_ahead.token.is_template_head() { return self @@ -4196,9 +4262,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_template_element(&mut self, is_tagged: bool) -> Res> { - debug!( + log::debug!( "{}: parse_template_element {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); use resast::spanned::{Position, SourceLocation}; let item = self.next_item()?; @@ -4247,9 +4314,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_function_expr(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_function_expr {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start_pos = self.look_ahead_position; let is_async = self.at_contextual_keyword("async"); @@ -4269,9 +4337,9 @@ where let prev_await = self.context.allow_await; let prev_yield = self.context.allow_yield; let prev_super = self.context.allow_super; - debug!("setting allow_super to {}", false); + log::debug!("setting allow_super to {}", false); self.context.set_allow_super(false); - debug!("setting allow_await to {}", is_async); + log::debug!("setting allow_await to {}", is_async); self.context.allow_await = keyword_async.is_none(); self.context.allow_yield = star.is_none(); let mut found_restricted = false; @@ -4335,7 +4403,7 @@ where self.context.allow_strict_directive = prev_allow_strict_directive; self.context.allow_yield = prev_yield; self.context.allow_await = prev_await; - debug!("setting allow_super to {}", prev_super); + log::debug!("setting allow_super to {}", prev_super); self.context.set_allow_super(prev_super); self.remove_scope(); let func = Func { @@ -4353,9 +4421,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_fn_name(&mut self, is_gen: bool) -> Res> { - debug!( + log::debug!( "{}: parse_fn_name {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.context.strict && !is_gen && self.at_keyword(Keyword::Yield(())) { self.parse_ident_name() @@ -4366,9 +4435,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_ident_name(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_ident_name {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let ident = self.next_item()?; match &ident.token { @@ -4382,9 +4452,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_var_ident(&mut self, is_var: bool) -> Res> { - debug!( + log::debug!( "{}: parse_var_ident {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let ident = self.next_item()?; if ident.token.matches_keyword(Keyword::Yield(())) @@ -4406,7 +4477,7 @@ where && !ident.token.matches_keyword(Keyword::Await(()))) || !is_var { - debug!("strict: {}\nis_strict_reserved: {}, matches_let: {}, matches_await: {}, is_var: {}", + log::debug!("strict: {}\nis_strict_reserved: {}, matches_let: {}, matches_await: {}, is_var: {}", self.context.strict, ident.token.is_strict_reserved(), ident.token.matches_keyword(Keyword::Let(())), @@ -4418,9 +4489,10 @@ where } else if (self.context.is_module || 
!self.context.allow_await) && &self.original[ident.span.start..ident.span.end] == "await" { - debug!( + log::debug!( "invalid await await: {}, module: {}", - self.context.allow_await, self.context.is_module + self.context.allow_await, + self.context.is_module ); return self.expected_token_error(&ident, &["not `await`"]); } @@ -4447,9 +4519,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_formal_params(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_formal_params {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_paren = self.expect_punct(Punct::OpenParen)?; let mut args = Vec::new(); @@ -4487,9 +4560,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_formal_param(&mut self, simple: bool) -> Res<(bool, bool, FuncArg<'b>)> { - debug!( + log::debug!( "{}: parse_formal_param {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead_position; let mut params: Vec> = Vec::new(); @@ -4508,9 +4582,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_rest_element(&mut self, params: &mut Vec>) -> Res<(bool, RestPat<'b>)> { - debug!( + log::debug!( "{}: parse_rest_element {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let dots = self.expect_punct(Punct::Ellipsis)?; let (restricted, pat) = self.parse_pattern(false, params)?; @@ -4526,9 +4601,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_binding_rest_el(&mut self, params: &mut Vec>) -> Res> { - debug!( + log::debug!( "{}: parse_binding_rest_el {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let dots = self.expect_punct(Punct::Ellipsis)?; let (_b, pat) = self.parse_pattern(false, params)?; @@ -4540,9 +4616,10 @@ where &mut self, params: &mut Vec>, ) -> Res<(bool, Pat<'b>)> { - debug!( + log::debug!( "{}: parse_pattern_with_default {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let (is_restricted, ret) = self.parse_pattern(true, params)?; if self.at_punct(Punct::Equal) { @@ -4569,9 +4646,10 @@ where is_var: bool, params: &mut Vec>, ) -> Res<(bool, Pat<'b>)> { - debug!( + log::debug!( "{}: parse_pattern {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.at_punct(Punct::OpenBracket) { self.parse_array_pattern(params) @@ -4590,9 +4668,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_array_pattern(&mut self, params: &mut Vec>) -> Res<(bool, Pat<'b>)> { - debug!( + log::debug!( "{}: parse_array_pattern {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_bracket = self.expect_punct(Punct::OpenBracket)?; let mut elements = Vec::new(); @@ -4644,9 +4723,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_object_pattern(&mut self) -> Res<(bool, Pat<'b>)> { - debug!( + log::debug!( "{}: parse_object_pattern {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_brace = self.expect_punct(Punct::OpenBrace)?; let mut body = Vec::new(); @@ -4674,9 +4754,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_rest_prop(&mut self) -> Res> { 
- debug!( + log::debug!( "{}: parse_rest_prop {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let dots = self.expect_punct(Punct::Ellipsis)?; let (_, arg) = self.parse_pattern(false, &mut Vec::new())?; @@ -4693,9 +4774,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_property_pattern(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_property_pattern {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let (key, colon, value) = if self.look_ahead.token.is_ident() { let ident = self.parse_var_ident(false)?; @@ -4737,9 +4819,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_assignment_expr(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_assignment_expr {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if !self.context.allow_yield && self.at_keyword(Keyword::Yield(())) { self.parse_yield_expr() @@ -4778,9 +4861,10 @@ where }; current = Expr::ArrowParamPlaceHolder(inner); } - debug!( + log::debug!( "current expression: {:?} {}", - current, self.context.allow_yield + current, + self.context.allow_yield ); if self.at_punct(Punct::EqualGreaterThan) { self.context.set_is_assignment_target(false); @@ -5221,9 +5305,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn reinterpret_expr_as_pat(&self, ex: Expr<'b>) -> Res> { - debug!( + log::debug!( "{}: reinterpret_expr_as_pat {:?}", - self.look_ahead.span.start, ex + self.look_ahead.span.start, + ex ); match ex { Expr::Array(a) => { @@ -5300,9 +5385,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn reinterpret_array_pat_part(&self, part: Expr<'b>) -> Res> { - debug!( + log::debug!( "{}: reinterpret_array_pat_part {:?}", - self.look_ahead.span.start, part + self.look_ahead.span.start, + part ); let ret = if let Expr::Spread(spread) = part { if Self::is_reinterpret_target(&spread.expr) { @@ -5377,9 +5463,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_yield_expr(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_yield_expr {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let keyword = self.expect_keyword(Keyword::Yield(()))?; let mut argument: Option> = None; @@ -5406,9 +5493,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_conditional_expr(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_conditional_expr {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let expr = inherit_cover_grammar!(self, parse_binary_expression)?; if self.at_punct(Punct::QuestionMark) { @@ -5437,9 +5525,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_binary_expression(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_binary_expression {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let mut current = inherit_cover_grammar!(self, parse_exponentiation_expression)?; let token = self.look_ahead.clone(); @@ -5462,7 +5551,7 @@ where right = stack.pop().ok_or_else(|| { self.op_error("invalid binary operation, no right expr in stack") })?; - debug!("right: {:#?} {}", right, self.context.allow_yield); + log::debug!("right: {:#?} {}", right, self.context.allow_yield); let op = ops.pop().ok_or_else(|| { 
self.op_error("invalid binary operation, too few operators") })?; @@ -5470,7 +5559,7 @@ where left = stack.pop().ok_or_else(|| { self.op_error("invalid binary operation, no left expr in stack") })?; - debug!("left: {:#?} {}", left, self.context.allow_yield); + log::debug!("left: {:#?} {}", left, self.context.allow_yield); if op.token.matches_punct(Punct::DoubleAmpersand) || op.token.matches_punct(Punct::DoublePipe) { @@ -5537,7 +5626,7 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_exponentiation_expression(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_exponentiation_expression", self.look_ahead.span.start ); @@ -5568,9 +5657,11 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_unary_expression(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_unary_expression {:?} allow_await: {}", - self.look_ahead.span.start, self.look_ahead.token, self.context.allow_await + self.look_ahead.span.start, + self.look_ahead.token, + self.context.allow_await ); if self.at_punct(Punct::Plus) || self.at_punct(Punct::Dash) @@ -5600,7 +5691,7 @@ where }; Ok(Expr::Unary(unary)) } else if !self.context.allow_await && self.at_keyword(Keyword::Await(())) { - debug!("parsing await expr"); + log::debug!("parsing await expr"); self.parse_await_expr() } else { self.parse_update_expr() @@ -5676,9 +5767,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_await_expr(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_await_expr {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.context.allow_await { self.unexpected_token_error(&self.look_ahead, "await is not valid in this context")?; @@ -5692,9 +5784,10 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_update_expr(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_update_expr {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let start = self.look_ahead.clone(); if self.at_punct(Punct::DoublePlus) || self.at_punct(Punct::DoubleDash) { @@ -5825,7 +5918,7 @@ where object: Box::new(expr), property: Box::new(prop), }; - debug!(target: "look_ahead", "{:?}", member); + log::debug!(target: "look_ahead", "{:?}", member); expr = Expr::Member(member); } else if self.at_punct(Punct::Period) { self.context.set_is_binding_element(false); @@ -5879,7 +5972,7 @@ where #[tracing::instrument(level = "trace", skip(self))] fn parse_left_hand_side_expr_allow_call(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_left_hand_side_expr_allow_call", self.look_ahead.span.start ); @@ -5910,7 +6003,7 @@ where property: Box::new(prop), indexer, }); - debug!(target: "look_ahead", "1 {:?}", expr); + log::debug!(target: "look_ahead", "1 {:?}", expr); } else if self.at_punct(Punct::OpenParen) { let current_pos = self.look_ahead_position; let async_arrow = is_async && start_pos.line == current_pos.line; @@ -5964,7 +6057,7 @@ where indexer, property: Box::new(prop), }; - debug!(target: "look_ahead", "{:?}", member); + log::debug!(target: "look_ahead", "{:?}", member); expr = Expr::Member(member); } else if self.look_ahead.token.is_template_head() { let quasi = self.parse_template_lit(true)?; @@ -5983,9 +6076,10 @@ where /// Parse the arguments of an async function #[tracing::instrument(level = "trace", skip(self))] fn parse_async_args(&mut self) -> Res<(Slice<'b>, Vec>>, Slice<'b>)> { - debug!( + log::debug!( "{}: parse_async_args {:?}", - 
self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_paren = self.expect_punct(Punct::OpenParen)?; let prev_await = self.context.allow_await; @@ -6030,9 +6124,10 @@ where /// note: not sure this is needed #[tracing::instrument(level = "trace", skip(self))] fn parse_async_arg(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_async_arg {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let expr = self.parse_assignment_expr()?; self.context.first_covert_initialized_name_error = None; @@ -6043,9 +6138,10 @@ where /// a non-existent comma #[tracing::instrument(level = "trace", skip(self))] fn expect_comma_sep(&mut self) -> Res> { - debug!( + log::debug!( "{}: expect_comma_sep {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); self.expect_punct(Punct::Comma) } @@ -6053,9 +6149,10 @@ where /// Parse an expression preceded by the `...` operator #[tracing::instrument(level = "trace", skip(self))] fn parse_spread_element(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_spread_element {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let dots = self.expect_punct(Punct::Ellipsis)?; let expr = inherit_cover_grammar!(self, parse_assignment_expr)?; @@ -6065,9 +6162,10 @@ where /// Parse function arguments, expecting to open with `(` and close with `)` #[tracing::instrument(level = "trace", skip(self))] fn parse_args(&mut self) -> Res<(Slice<'b>, Vec>>, Slice<'b>)> { - debug!( + log::debug!( "{}: parse_args {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let open_paren = self.expect_punct(Punct::OpenParen)?; let mut args = Vec::new(); @@ -6101,9 +6199,10 @@ where /// body #[tracing::instrument(level = "trace", skip(self))] fn parse_new_expr(&mut self) -> Res> { - debug!( + log::debug!( "{}: parse_new_expr {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); let item = self.next_item()?; if let Token::Keyword(ref key) = &item.token { @@ -6230,7 +6329,7 @@ where /// and return the last token #[tracing::instrument(level = "trace", skip(self))] fn next_item(&mut self) -> Res> { - trace!("next_item {}", self.context.has_line_term); + log::trace!("next_item {}", self.context.has_line_term); let mut comment_line_term = false; loop { self.context.has_line_term = comment_line_term || self.scanner.has_pending_new_line(); @@ -6242,11 +6341,11 @@ where look_ahead.token, self.scanner.string_for(&look_ahead.span) ); - debug!("look_ahead: {:?}", self._look_ahead); + log::debug!("look_ahead: {:?}", self._look_ahead); } self.look_ahead_position = look_ahead.location.start; if look_ahead.token.is_comment() { - trace!( + log::trace!( "next_item comment {} {:?}", self.context.has_line_term, look_ahead.token @@ -6361,9 +6460,10 @@ where } #[tracing::instrument(level = "trace", skip(self))] fn at_import_call(&mut self) -> Res { - debug!( + log::debug!( "{}: at_import_call {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.at_keyword(Keyword::Import(())) { let state = self.scanner.get_state(); @@ -6474,9 +6574,10 @@ where /// next token would need to be on the same line #[tracing::instrument(level = "trace", skip(self))] fn at_async_function(&mut self) -> 
bool { - debug!( + log::debug!( "{}: at_async_function {:?}", - self.look_ahead.span.start, self.look_ahead.token + self.look_ahead.span.start, + self.look_ahead.token ); if self.at_contextual_keyword("async") { !self.scanner.has_pending_new_line() @@ -6499,7 +6600,7 @@ where /// EoF or a close brace #[tracing::instrument(level = "trace", skip(self))] fn consume_semicolon(&mut self) -> Res>> { - trace!("consume_semicolon {}", self.context.has_line_term); + log::trace!("consume_semicolon {}", self.context.has_line_term); if self.at_punct(Punct::SemiColon) { let semi = self.next_item()?; return Ok(self.slice_from(&semi)); @@ -6514,7 +6615,7 @@ where /// Tests if a token matches an &str that might represent /// a contextual keyword like `async` fn at_contextual_keyword(&self, s: &str) -> bool { - debug!("at_contextual_keyword {:?}", s); + log::debug!("at_contextual_keyword {:?}", s); if let Ok(slice) = self.get_slice(&self.look_ahead) { slice.source == s } else { @@ -6796,7 +6897,7 @@ where } pub(crate) fn next_part(&mut self) -> Res> { - trace!( + log::trace!( "next_part past_prolog: {}, strict: {}", self.context.past_prolog, self.context.strict diff --git a/tests/all/ecma262.rs b/tests/all/ecma262.rs index eb57232..ff29f68 100644 --- a/tests/all/ecma262.rs +++ b/tests/all/ecma262.rs @@ -8,7 +8,7 @@ fn es5() { let _ = env_logger::builder().is_test(true).try_init().ok(); info!("ES5"); let path = Lib::Everything(EverythingVersion::Es5).path(); - debug!("path: {:?}", path); + log::debug!("path: {:?}", path); let js = get_js_file(&path).unwrap_or_else(|e| panic!("Faield to get {:?}\n{}", path, e)); let mut p = Parser::new(&js).expect("Failed to create parser"); let tokens = p.parse().unwrap(); From 92201afd454bdc97e1546413bf8c82d6cce8a53b Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:02:55 -0600 Subject: [PATCH 12/22] chore: update to 2021 edition --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index ca90a89..e8a6f73 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,7 @@ license = "MIT" readme = "./README.md" keywords = ["JavaScript", "parsing", "JS", "ES", "ECMA"] categories = ["parsing", "text-processing", "web-programming"] -edition = "2018" +edition = "2021" [dependencies] hash-chain = "0.3" From 367faac291d78bd0f8fae505da5abe9ae6a1abdb Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:05:17 -0600 Subject: [PATCH 13/22] chore: update checkout action to v3 --- .github/workflows/rust.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 68d0e37..4b267a0 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Setup Node.js for use with actions uses: actions/setup-node@v3 - name: install js test libs from npm From fbdf96589360b80ef90b713c252da7c7272e8777 Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:06:51 -0600 Subject: [PATCH 14/22] chore: update cache action to v3 --- .github/workflows/rust.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 4b267a0..e805593 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -37,14 +37,14 @@ jobs: - name: Check syntax run: cargo fmt --all -- --check - name: Cache node_modules - uses: actions/cache@v1.0.3 + uses: 
actions/cache@v3 with: path: ./node_modules key: ${{ runner.os }}.node_modules - name: before cargo cache run: rm -rf ~/.cargo/registry - name: Cache cargo directory - uses: actions/cache@v1.0.3 + uses: actions/cache@v3 with: key: ${{ runner.os }}.cargo path: ~/.cargo From 0ef0fd379685285cd6370d877965725420c15e40 Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:28:06 -0600 Subject: [PATCH 15/22] remove stale email address from `authors` --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index e8a6f73..b28467a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "ressa" version = "0.8.0" -authors = ["Robert Masen "] +authors = ["Robert Masen "] repository = "https://github.com/rusty-ecma/RESSA" description = "An ECMAscript parser" license = "MIT" From 77581072b3d1cce0d4378a25e2e2ebdb8768910a Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:31:14 -0600 Subject: [PATCH 16/22] chore: move valid scripts to /scripts --- run_test262.ps1 | 3 --- run_test262.sh | 3 --- run_moz_central_test.sh => scripts/run_moz_central_test.sh | 0 3 files changed, 6 deletions(-) delete mode 100644 run_test262.ps1 delete mode 100755 run_test262.sh rename run_moz_central_test.sh => scripts/run_moz_central_test.sh (100%) diff --git a/run_test262.ps1 b/run_test262.ps1 deleted file mode 100644 index 651f1fe..0000000 --- a/run_test262.ps1 +++ /dev/null @@ -1,3 +0,0 @@ -$env:RUST_MIN_STACK=9999999 -$env:RESSA_WRITE_FAILURES=1 -cargo test262 \ No newline at end of file diff --git a/run_test262.sh b/run_test262.sh deleted file mode 100755 index 170e97a..0000000 --- a/run_test262.sh +++ /dev/null @@ -1,3 +0,0 @@ -export RUST_MIN_STACK=9999999 -export RESSA_WRITE_FAILURES=1 -cargo test262 #|| node ./prepFailures.js && rsync -r ./failures/test262/ rfm@45.55.78.145:~/projects/wiredforge.com/public/test262/ \ No newline at end of file diff --git a/run_moz_central_test.sh b/scripts/run_moz_central_test.sh similarity index 100% rename from run_moz_central_test.sh rename to scripts/run_moz_central_test.sh From 8fa3fae42b5bc15b26749612f13aedc357937c3a Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:31:43 -0600 Subject: [PATCH 17/22] chore: remove .cargo directory --- .cargo/config | 4 ---- 1 file changed, 4 deletions(-) delete mode 100644 .cargo/config diff --git a/.cargo/config b/.cargo/config deleted file mode 100644 index 25bec2b..0000000 --- a/.cargo/config +++ /dev/null @@ -1,4 +0,0 @@ -[alias] -test262 = "test --features test_262 --test test262 -- test262" -moz = "test --features moz_central -- --nocapture moz_central" -test_all = "test --features=test_262,moz_central" From ca165c06201f6043aadf993b2db60fd17c1e2d2d Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:49:14 -0600 Subject: [PATCH 18/22] chore: invert integration tests This moves from a single integration tests `all` to one test module per .rs file with a common module for the ecma262 and major_libs --- tests/{all => }/comment_handler.rs | 0 tests/{all => }/ecma262.rs | 10 +++++----- tests/{all/main.rs => libs_common.rs} | 18 +++--------------- tests/{all => }/major_libs.rs | 8 +++++--- .../ecma262__es2015_module-10.snap} | 2 +- .../ecma262__es2015_module-2.snap} | 2 +- .../ecma262__es2015_module-3.snap} | 2 +- .../ecma262__es2015_module-4.snap} | 2 +- .../ecma262__es2015_module-5.snap} | 2 +- .../ecma262__es2015_module-6.snap} | 2 +- .../ecma262__es2015_module-7.snap} | 2 +- .../ecma262__es2015_module-8.snap} 
| 2 +- .../ecma262__es2015_module-9.snap} | 2 +- .../ecma262__es2015_module.snap} | 2 +- .../ecma262__es2015_script.snap} | 2 +- .../ecma262__es5.snap} | 2 +- tests/{all => }/snippets.rs | 0 tests/{all => }/spider_monkey.rs | 3 ++- 18 files changed, 27 insertions(+), 36 deletions(-) rename tests/{all => }/comment_handler.rs (100%) rename tests/{all => }/ecma262.rs (93%) rename tests/{all/main.rs => libs_common.rs} (90%) rename tests/{all => }/major_libs.rs (95%) rename tests/{all/snapshots/all__ecma262__es2015_module-10.snap => snapshots/ecma262__es2015_module-10.snap} (95%) rename tests/{all/snapshots/all__ecma262__es2015_module-2.snap => snapshots/ecma262__es2015_module-2.snap} (94%) rename tests/{all/snapshots/all__ecma262__es2015_module-3.snap => snapshots/ecma262__es2015_module-3.snap} (95%) rename tests/{all/snapshots/all__ecma262__es2015_module-4.snap => snapshots/ecma262__es2015_module-4.snap} (94%) rename tests/{all/snapshots/all__ecma262__es2015_module-5.snap => snapshots/ecma262__es2015_module-5.snap} (95%) rename tests/{all/snapshots/all__ecma262__es2015_module-6.snap => snapshots/ecma262__es2015_module-6.snap} (96%) rename tests/{all/snapshots/all__ecma262__es2015_module-7.snap => snapshots/ecma262__es2015_module-7.snap} (94%) rename tests/{all/snapshots/all__ecma262__es2015_module-8.snap => snapshots/ecma262__es2015_module-8.snap} (96%) rename tests/{all/snapshots/all__ecma262__es2015_module-9.snap => snapshots/ecma262__es2015_module-9.snap} (90%) rename tests/{all/snapshots/all__ecma262__es2015_module.snap => snapshots/ecma262__es2015_module.snap} (99%) rename tests/{all/snapshots/all__ecma262__es2015_script.snap => snapshots/ecma262__es2015_script.snap} (99%) rename tests/{all/snapshots/all__ecma262__es5.snap => snapshots/ecma262__es5.snap} (99%) rename tests/{all => }/snippets.rs (100%) rename tests/{all => }/spider_monkey.rs (99%) diff --git a/tests/all/comment_handler.rs b/tests/comment_handler.rs similarity index 100% rename from tests/all/comment_handler.rs rename to tests/comment_handler.rs diff --git a/tests/all/ecma262.rs b/tests/ecma262.rs similarity index 93% rename from tests/all/ecma262.rs rename to tests/ecma262.rs index ff29f68..2c0137e 100644 --- a/tests/all/ecma262.rs +++ b/tests/ecma262.rs @@ -1,12 +1,12 @@ -#![cfg(test)] -use super::{get_js_file, EverythingVersion, Lib}; +mod libs_common; use env_logger; +use libs_common::{get_js_file, EverythingVersion, Lib}; use ressa::Parser; #[test] fn es5() { let _ = env_logger::builder().is_test(true).try_init().ok(); - info!("ES5"); + log::info!("ES5"); let path = Lib::Everything(EverythingVersion::Es5).path(); log::debug!("path: {:?}", path); let js = get_js_file(&path).unwrap_or_else(|e| panic!("Faield to get {:?}\n{}", path, e)); @@ -18,7 +18,7 @@ fn es5() { #[test] fn es2015_script() { let _ = env_logger::builder().is_test(true).try_init().ok(); - info!("ES2015 Script"); + log::info!("ES2015 Script"); let path = Lib::Everything(EverythingVersion::Es2015Script).path(); let js = get_js_file(&path).expect(&format!("Failed to get {:?}", path)); let mut p = Parser::new(&js).expect("Failed to create parser"); @@ -28,7 +28,7 @@ fn es2015_script() { #[test] fn es2015_module() { - info!("ES2015 Module"); + log::info!("ES2015 Module"); let _ = env_logger::builder().is_test(true).try_init().ok(); let path = Lib::Everything(EverythingVersion::Es2015Module).path(); let js = get_js_file(&path).expect(&format!("Failed to get {:?}", path)); diff --git a/tests/all/main.rs b/tests/libs_common.rs similarity index 90% rename from 
tests/all/main.rs rename to tests/libs_common.rs index 6ae4167..8cc9ab6 100644 --- a/tests/all/main.rs +++ b/tests/libs_common.rs @@ -1,20 +1,8 @@ -extern crate env_logger; -#[macro_use] -extern crate log; -extern crate ress; -extern crate ressa; - -mod comment_handler; -mod ecma262; -mod major_libs; -mod snippets; -#[cfg(feature = "moz_central")] -mod spider_monkey; - +#![allow(unused)] use std::{fs::read_to_string, io::Error}; #[derive(Clone, Copy, Debug)] -enum Lib { +pub enum Lib { Jquery, Angular, React, @@ -25,7 +13,7 @@ enum Lib { Everything(EverythingVersion), } #[derive(Clone, Copy, Debug)] -enum EverythingVersion { +pub enum EverythingVersion { Es5, Es2015Module, Es2015Script, diff --git a/tests/all/major_libs.rs b/tests/major_libs.rs similarity index 95% rename from tests/all/major_libs.rs rename to tests/major_libs.rs index 10553ef..90c2a16 100644 --- a/tests/all/major_libs.rs +++ b/tests/major_libs.rs @@ -2,7 +2,9 @@ use env_logger; use ressa::Parser; -use super::{get_js_file, Lib}; +mod libs_common; + +use libs_common::Lib; #[test] fn angular1() { @@ -84,12 +86,12 @@ fn get_js(l: Lib) -> Result<(String, String), ::std::io::Error> { } fn get_normal_js(l: Lib) -> Result { - get_js_file(l.path()) + libs_common::get_js_file(l.path()) } fn get_min_js(l: Lib) -> Result { if let Some(p) = l.min_path() { - get_js_file(&p) + libs_common::get_js_file(&p) } else { Err(::std::io::Error::new( ::std::io::ErrorKind::NotFound, diff --git a/tests/all/snapshots/all__ecma262__es2015_module-10.snap b/tests/snapshots/ecma262__es2015_module-10.snap similarity index 95% rename from tests/all/snapshots/all__ecma262__es2015_module-10.snap rename to tests/snapshots/ecma262__es2015_module-10.snap index cf6e828..dbbb1dd 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-10.snap +++ b/tests/snapshots/ecma262__es2015_module-10.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module-2.snap b/tests/snapshots/ecma262__es2015_module-2.snap similarity index 94% rename from tests/all/snapshots/all__ecma262__es2015_module-2.snap rename to tests/snapshots/ecma262__es2015_module-2.snap index 314abee..0686aa4 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-2.snap +++ b/tests/snapshots/ecma262__es2015_module-2.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module-3.snap b/tests/snapshots/ecma262__es2015_module-3.snap similarity index 95% rename from tests/all/snapshots/all__ecma262__es2015_module-3.snap rename to tests/snapshots/ecma262__es2015_module-3.snap index 4232367..dc77253 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-3.snap +++ b/tests/snapshots/ecma262__es2015_module-3.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module-4.snap b/tests/snapshots/ecma262__es2015_module-4.snap similarity index 94% rename from tests/all/snapshots/all__ecma262__es2015_module-4.snap rename to tests/snapshots/ecma262__es2015_module-4.snap index a33b8d5..a83727f 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-4.snap +++ b/tests/snapshots/ecma262__es2015_module-4.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module-5.snap 
b/tests/snapshots/ecma262__es2015_module-5.snap similarity index 95% rename from tests/all/snapshots/all__ecma262__es2015_module-5.snap rename to tests/snapshots/ecma262__es2015_module-5.snap index 9e1c846..f515072 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-5.snap +++ b/tests/snapshots/ecma262__es2015_module-5.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module-6.snap b/tests/snapshots/ecma262__es2015_module-6.snap similarity index 96% rename from tests/all/snapshots/all__ecma262__es2015_module-6.snap rename to tests/snapshots/ecma262__es2015_module-6.snap index 45e8905..2f9c9ef 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-6.snap +++ b/tests/snapshots/ecma262__es2015_module-6.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module-7.snap b/tests/snapshots/ecma262__es2015_module-7.snap similarity index 94% rename from tests/all/snapshots/all__ecma262__es2015_module-7.snap rename to tests/snapshots/ecma262__es2015_module-7.snap index 436b124..8a4f490 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-7.snap +++ b/tests/snapshots/ecma262__es2015_module-7.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module-8.snap b/tests/snapshots/ecma262__es2015_module-8.snap similarity index 96% rename from tests/all/snapshots/all__ecma262__es2015_module-8.snap rename to tests/snapshots/ecma262__es2015_module-8.snap index da0b812..7155194 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-8.snap +++ b/tests/snapshots/ecma262__es2015_module-8.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module-9.snap b/tests/snapshots/ecma262__es2015_module-9.snap similarity index 90% rename from tests/all/snapshots/all__ecma262__es2015_module-9.snap rename to tests/snapshots/ecma262__es2015_module-9.snap index 97b846c..31eccad 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module-9.snap +++ b/tests/snapshots/ecma262__es2015_module-9.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: res --- [ diff --git a/tests/all/snapshots/all__ecma262__es2015_module.snap b/tests/snapshots/ecma262__es2015_module.snap similarity index 99% rename from tests/all/snapshots/all__ecma262__es2015_module.snap rename to tests/snapshots/ecma262__es2015_module.snap index cfe86f2..b0721f4 100644 --- a/tests/all/snapshots/all__ecma262__es2015_module.snap +++ b/tests/snapshots/ecma262__es2015_module.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: tokens --- Mod( diff --git a/tests/all/snapshots/all__ecma262__es2015_script.snap b/tests/snapshots/ecma262__es2015_script.snap similarity index 99% rename from tests/all/snapshots/all__ecma262__es2015_script.snap rename to tests/snapshots/ecma262__es2015_script.snap index 4201bee..18de74d 100644 --- a/tests/all/snapshots/all__ecma262__es2015_script.snap +++ b/tests/snapshots/ecma262__es2015_script.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: tokens --- Script( diff --git a/tests/all/snapshots/all__ecma262__es5.snap b/tests/snapshots/ecma262__es5.snap similarity index 99% rename from 
tests/all/snapshots/all__ecma262__es5.snap rename to tests/snapshots/ecma262__es5.snap index c7b7f3b..193a62c 100644 --- a/tests/all/snapshots/all__ecma262__es5.snap +++ b/tests/snapshots/ecma262__es5.snap @@ -1,5 +1,5 @@ --- -source: tests/all/ecma262.rs +source: tests/ecma262.rs expression: tokens --- Script( diff --git a/tests/all/snippets.rs b/tests/snippets.rs similarity index 100% rename from tests/all/snippets.rs rename to tests/snippets.rs diff --git a/tests/all/spider_monkey.rs b/tests/spider_monkey.rs similarity index 99% rename from tests/all/spider_monkey.rs rename to tests/spider_monkey.rs index ff6fa7d..0bfa356 100644 --- a/tests/all/spider_monkey.rs +++ b/tests/spider_monkey.rs @@ -1,4 +1,5 @@ -#![cfg(test)] +#![cfg(feature = "moz_central")] + use ressa::{Builder, Error}; use std::path::Path; use walkdir::WalkDir; From 438e9b3504f224c19b391b57613c2af54c2f1fe3 Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 12:51:02 -0600 Subject: [PATCH 19/22] chore: rename ecma262 test to everything_js --- tests/{ecma262.rs => everything_js.rs} | 0 ...2015_module-10.snap => everything_js__es2015_module-10.snap} | 2 +- ...es2015_module-2.snap => everything_js__es2015_module-2.snap} | 2 +- ...es2015_module-3.snap => everything_js__es2015_module-3.snap} | 2 +- ...es2015_module-4.snap => everything_js__es2015_module-4.snap} | 2 +- ...es2015_module-5.snap => everything_js__es2015_module-5.snap} | 2 +- ...es2015_module-6.snap => everything_js__es2015_module-6.snap} | 2 +- ...es2015_module-7.snap => everything_js__es2015_module-7.snap} | 2 +- ...es2015_module-8.snap => everything_js__es2015_module-8.snap} | 2 +- ...es2015_module-9.snap => everything_js__es2015_module-9.snap} | 2 +- ...62__es2015_module.snap => everything_js__es2015_module.snap} | 2 +- ...62__es2015_script.snap => everything_js__es2015_script.snap} | 2 +- tests/snapshots/{ecma262__es5.snap => everything_js__es5.snap} | 2 +- 13 files changed, 12 insertions(+), 12 deletions(-) rename tests/{ecma262.rs => everything_js.rs} (100%) rename tests/snapshots/{ecma262__es2015_module-10.snap => everything_js__es2015_module-10.snap} (95%) rename tests/snapshots/{ecma262__es2015_module-2.snap => everything_js__es2015_module-2.snap} (94%) rename tests/snapshots/{ecma262__es2015_module-3.snap => everything_js__es2015_module-3.snap} (95%) rename tests/snapshots/{ecma262__es2015_module-4.snap => everything_js__es2015_module-4.snap} (94%) rename tests/snapshots/{ecma262__es2015_module-5.snap => everything_js__es2015_module-5.snap} (95%) rename tests/snapshots/{ecma262__es2015_module-6.snap => everything_js__es2015_module-6.snap} (96%) rename tests/snapshots/{ecma262__es2015_module-7.snap => everything_js__es2015_module-7.snap} (93%) rename tests/snapshots/{ecma262__es2015_module-8.snap => everything_js__es2015_module-8.snap} (96%) rename tests/snapshots/{ecma262__es2015_module-9.snap => everything_js__es2015_module-9.snap} (90%) rename tests/snapshots/{ecma262__es2015_module.snap => everything_js__es2015_module.snap} (99%) rename tests/snapshots/{ecma262__es2015_script.snap => everything_js__es2015_script.snap} (99%) rename tests/snapshots/{ecma262__es5.snap => everything_js__es5.snap} (99%) diff --git a/tests/ecma262.rs b/tests/everything_js.rs similarity index 100% rename from tests/ecma262.rs rename to tests/everything_js.rs diff --git a/tests/snapshots/ecma262__es2015_module-10.snap b/tests/snapshots/everything_js__es2015_module-10.snap similarity index 95% rename from tests/snapshots/ecma262__es2015_module-10.snap rename to 
tests/snapshots/everything_js__es2015_module-10.snap index dbbb1dd..bdddfb0 100644 --- a/tests/snapshots/ecma262__es2015_module-10.snap +++ b/tests/snapshots/everything_js__es2015_module-10.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module-2.snap b/tests/snapshots/everything_js__es2015_module-2.snap similarity index 94% rename from tests/snapshots/ecma262__es2015_module-2.snap rename to tests/snapshots/everything_js__es2015_module-2.snap index 0686aa4..3ee16e1 100644 --- a/tests/snapshots/ecma262__es2015_module-2.snap +++ b/tests/snapshots/everything_js__es2015_module-2.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module-3.snap b/tests/snapshots/everything_js__es2015_module-3.snap similarity index 95% rename from tests/snapshots/ecma262__es2015_module-3.snap rename to tests/snapshots/everything_js__es2015_module-3.snap index dc77253..db334f9 100644 --- a/tests/snapshots/ecma262__es2015_module-3.snap +++ b/tests/snapshots/everything_js__es2015_module-3.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module-4.snap b/tests/snapshots/everything_js__es2015_module-4.snap similarity index 94% rename from tests/snapshots/ecma262__es2015_module-4.snap rename to tests/snapshots/everything_js__es2015_module-4.snap index a83727f..4c0d8bb 100644 --- a/tests/snapshots/ecma262__es2015_module-4.snap +++ b/tests/snapshots/everything_js__es2015_module-4.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module-5.snap b/tests/snapshots/everything_js__es2015_module-5.snap similarity index 95% rename from tests/snapshots/ecma262__es2015_module-5.snap rename to tests/snapshots/everything_js__es2015_module-5.snap index f515072..f4dab22 100644 --- a/tests/snapshots/ecma262__es2015_module-5.snap +++ b/tests/snapshots/everything_js__es2015_module-5.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module-6.snap b/tests/snapshots/everything_js__es2015_module-6.snap similarity index 96% rename from tests/snapshots/ecma262__es2015_module-6.snap rename to tests/snapshots/everything_js__es2015_module-6.snap index 2f9c9ef..e28e84f 100644 --- a/tests/snapshots/ecma262__es2015_module-6.snap +++ b/tests/snapshots/everything_js__es2015_module-6.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module-7.snap b/tests/snapshots/everything_js__es2015_module-7.snap similarity index 93% rename from tests/snapshots/ecma262__es2015_module-7.snap rename to tests/snapshots/everything_js__es2015_module-7.snap index 8a4f490..7be2619 100644 --- a/tests/snapshots/ecma262__es2015_module-7.snap +++ b/tests/snapshots/everything_js__es2015_module-7.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module-8.snap b/tests/snapshots/everything_js__es2015_module-8.snap similarity index 96% rename from tests/snapshots/ecma262__es2015_module-8.snap rename to tests/snapshots/everything_js__es2015_module-8.snap index 7155194..58683e4 100644 --- 
a/tests/snapshots/ecma262__es2015_module-8.snap +++ b/tests/snapshots/everything_js__es2015_module-8.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module-9.snap b/tests/snapshots/everything_js__es2015_module-9.snap similarity index 90% rename from tests/snapshots/ecma262__es2015_module-9.snap rename to tests/snapshots/everything_js__es2015_module-9.snap index 31eccad..654b9bc 100644 --- a/tests/snapshots/ecma262__es2015_module-9.snap +++ b/tests/snapshots/everything_js__es2015_module-9.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: res --- [ diff --git a/tests/snapshots/ecma262__es2015_module.snap b/tests/snapshots/everything_js__es2015_module.snap similarity index 99% rename from tests/snapshots/ecma262__es2015_module.snap rename to tests/snapshots/everything_js__es2015_module.snap index b0721f4..24d46fe 100644 --- a/tests/snapshots/ecma262__es2015_module.snap +++ b/tests/snapshots/everything_js__es2015_module.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: tokens --- Mod( diff --git a/tests/snapshots/ecma262__es2015_script.snap b/tests/snapshots/everything_js__es2015_script.snap similarity index 99% rename from tests/snapshots/ecma262__es2015_script.snap rename to tests/snapshots/everything_js__es2015_script.snap index 18de74d..aba396a 100644 --- a/tests/snapshots/ecma262__es2015_script.snap +++ b/tests/snapshots/everything_js__es2015_script.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: tokens --- Script( diff --git a/tests/snapshots/ecma262__es5.snap b/tests/snapshots/everything_js__es5.snap similarity index 99% rename from tests/snapshots/ecma262__es5.snap rename to tests/snapshots/everything_js__es5.snap index 193a62c..69e9a0f 100644 --- a/tests/snapshots/ecma262__es5.snap +++ b/tests/snapshots/everything_js__es5.snap @@ -1,5 +1,5 @@ --- -source: tests/ecma262.rs +source: tests/everything_js.rs expression: tokens --- Script( From 456f4834fa3a6972179972bb60feac35ceccc056 Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 13:22:45 -0600 Subject: [PATCH 20/22] chore: apply md lint suggestions --- README.md | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 4702eb8..2796115 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,10 @@ +# RESSA + [![Rust](https://github.com/rusty-ecma/RESSA/workflows/Rust/badge.svg?branch=featureless_test262)](https://github.com/rusty-ecma/RESSA/actions) [![crates.io](https://img.shields.io/crates/v/ressa.svg)](https://crates.io/crates/ressa) [![last commit master](https://img.shields.io/github/last-commit/FreeMasen/RESSA.svg)](https://github.com/FreeMasen/RESSA/commits/master) -# RESSA + > Rust EcmaScript Syntax Analyzer This project is part of a series of crates designed to enable developers to create JavaScript development tools using the Rust programming language. [Rusty ECMA Details](#rusty-ecma-details) @@ -16,6 +18,7 @@ Conveniently `Parser` implements `Iterator` over `Result`, this means that you can evaluate your JS in pieces from top to bottom. ### Iterator Example + ```rust use resast::prelude::*; use ressa::*; @@ -44,6 +47,7 @@ fn main() { Another way to interact with a `Parser` would be to utilize the `parse` method. 
This method will iterate over all of the found `ProgramParts` and collect them into a `Program`, ### Parse Example + ```rust use ressa::{ Parser, @@ -66,12 +70,14 @@ function Thing() { Once you get to the inner `parts` of a `Program` you have a `Vec` which will operate the same as the [iterator example](#iterator-example) # Rusty ECMA Details + ## The Rust ECMA Crates + - [RESS](https://github.com/freemasen/ress) - Tokenizer or Scanner - [RESSA](https://github.com/freemasen/ressa) - Parser - [RESAST](https://github.com/freemasen/resast) - AST - [RESW](https://github.com/freemasen/resw) - Writer ## Why So Many? -While much of what each crate provides is closely coupled with the other crates, the main goal is to provide the largest amount of customizability. For example, someone writing a fuzzer would only need the `RESAST` and `RESW`, it seems silly to require that they also pull in `RESS` and `RESSA` needlessly. +While much of what each crate provides is closely coupled with the other crates, the main goal is to provide the largest amount of customizability. For example, someone writing a fuzzer would only need the `RESAST` and `RESW`, it seems silly to require that they also pull in `RESS` and `RESSA` needlessly. From fa1f26dc824c3c9b97b5adbbbd8d2f8ea275b03e Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 13:23:07 -0600 Subject: [PATCH 21/22] chore: include stack size info to contributing.md --- CONTRIBUTING.md | 36 +++++++++++++++++++++++------------- 1 file changed, 23 insertions(+), 13 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ec3024f..cf3ae3b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,13 +10,33 @@ two people working on the same issue ## Testing +### Memory Issues + +The parsers defined here are recursive decent parsers, meaning they heavily rely on recursion +which ends up being problematic for the stack size. For running tests it is recommended to use +the environment variable `RUST_MIN_STACK` set to `9999999` (7 nines). Otherwise you will probably +encounter the error: + +```sh +thread '' has overflowed its stack +fatal runtime error: stack overflow +error: test failed, to rerun pass `--test ` + +Caused by: + process didn't exit successfully: `-` (signal: 6, SIGABRT: process abort signal) +``` + +[See this issue for more details](https://github.com/rusty-ecma/RESSA/issues/76) + +### Extra Files + There are a few sets of JavaScript files that are required to run the tests in this repository. -### NPM files +#### NPM files -This set can be easily acquired by running `npm install` in the root of this project. +This set can be easily acquired by running `npm install` in the root of this project. -### Spider Monkey Files +#### Spider Monkey Files An additional test is also available behind a feature flag `moz_central` that requires the JIT Test files from the FireFox repository, the expectation is that these will exist in the folder `moz-central` in the root of this project. To get these files you can either manually download and unzip them by following [this link](https://hg.mozilla.org/mozilla-central/archive/tip.zip/js/src/jit-test/tests/) or you can execute the following command. @@ -30,13 +50,3 @@ To run these tests simply execute the following command. ```sh cargo test --features moz_central -- moz_central ``` - -### Test262 - -Another test that is feature gated due to the time it takes to run parses all 30,000+ files in the [Test262](https://github.com/tc39/test262) test suite. 
The expectation is that the test folder from that repository is in the root of the project with the name test262. - -```sh -curl -L https://github.com/tc39/test262/zipball/master -o test262.zip -unzip -q test262.zip -d test262_full -mv ./test262_full/test ./test262 -``` From fc3ccae7e981ae3e0eeb5e44ecd3502a51141f63 Mon Sep 17 00:00:00 2001 From: Robert Masen Date: Thu, 24 Nov 2022 13:25:23 -0600 Subject: [PATCH 22/22] chore: add note to readme about js modules --- README.md | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 2796115..3447bec 100644 --- a/README.md +++ b/README.md @@ -15,7 +15,10 @@ The two major pieces that users will interact with are the `Parser` struct and t The parser struct will be the main way to convert text into an `AST`. Conveniently `Parser` implements `Iterator` over `Result`, -this means that you can evaluate your JS in pieces from top to bottom. +this means that you can evaluate your JS in pieces from top to bottom. + +> Note: By default the `Parser` will not be able to handle js module features, +> [see the module example](./examples/simple_module.rs) for details on how to parse js modules ### Iterator Example
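
For reference, a minimal sketch of the module-parsing workflow that the README note added in PATCH 22/22 points at. The `Builder::module`/`Builder::js` calls below are an assumption based on the `ressa::Builder` type imported by the test suite and the linked `./examples/simple_module.rs`, not something shown in this patch set:

```rust
use ressa::Builder;

fn main() {
    // `import`/`export` only parse when the parser is in module mode.
    let js = "import { thing } from './stuff.js';\nexport default function () { return thing(); }";
    // Assumption: `Builder::module(true)` switches the parser into module mode,
    // mirroring what the simple_module example is described as demonstrating.
    let parser = Builder::new()
        .module(true)
        .js(js)
        .build()
        .expect("failed to build module parser");
    // `Parser` implements `Iterator`, so the module can be consumed part by part.
    for part in parser {
        println!("{:#?}", part.expect("failed to parse program part"));
    }
}
```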