@@ -2,6 +2,7 @@ use crate::{Statistics, TestOutcomeResult, VersionedStats};
 use super::SuiteResult;
 use color_eyre::{eyre::WrapErr, Result};
+use rustc_hash::FxHashMap;
 use serde::{Deserialize, Serialize};
 use std::{
     env, fs,
@@ -16,8 +17,12 @@ struct ResultInfo {
     commit: Box<str>,
     #[serde(rename = "u")]
     test262_commit: Box<str>,
+    #[serde(rename = "a")]
+    stats: Statistics,
+    #[serde(rename = "v")]
+    versioned_results: VersionedStats,
     #[serde(rename = "r")]
-    results: SuiteResult,
+    results: FxHashMap<Box<str>, SuiteResult>,
 }
 
 /// Structure to store full result information.
@@ -29,8 +34,6 @@ struct ReducedResultInfo {
     test262_commit: Box<str>,
     #[serde(rename = "a")]
     stats: Statistics,
-    #[serde(rename = "av", default)]
-    versioned_stats: VersionedStats,
 }
 
 impl From<ResultInfo> for ReducedResultInfo {
@@ -39,8 +42,7 @@ impl From<ResultInfo> for ReducedResultInfo {
         Self {
             commit: info.commit,
             test262_commit: info.test262_commit,
-            stats: info.results.stats,
-            versioned_stats: info.results.versioned_stats,
+            stats: info.stats,
         }
     }
 }
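
The structs above rely on two ideas: short `#[serde(rename = ...)]` keys keep the serialized JSON compact, and the full-to-reduced conversion drops the per-suite map while keeping only the aggregated stats. Below is a minimal, self-contained sketch of that pattern; the types and the rename keys are illustrative stand-ins, not the crate's actual `Statistics`/`SuiteResult` definitions.

```rust
use serde::{Deserialize, Serialize};
use std::collections::HashMap;

// Illustrative stand-in for the crate's Statistics / SuiteResult types.
#[derive(Clone, Copy, Serialize, Deserialize)]
struct Stats {
    total: usize,
    passed: usize,
}

#[derive(Serialize, Deserialize)]
struct Full {
    #[serde(rename = "c")]
    commit: Box<str>,
    #[serde(rename = "a")]
    stats: Stats,
    // Per-suite results keyed by suite name, serialized under a short key.
    #[serde(rename = "r")]
    results: HashMap<Box<str>, Stats>,
}

#[derive(Serialize, Deserialize)]
struct Reduced {
    #[serde(rename = "c")]
    commit: Box<str>,
    #[serde(rename = "a")]
    stats: Stats,
}

impl From<Full> for Reduced {
    // The reduction keeps only the commit and the aggregated stats.
    fn from(full: Full) -> Self {
        Self { commit: full.commit, stats: full.stats }
    }
}

fn main() -> serde_json::Result<()> {
    let full = Full {
        commit: "abc123".into(),
        stats: Stats { total: 2, passed: 1 },
        results: HashMap::new(),
    };
    // Prints e.g. {"c":"abc123","a":{"total":2,"passed":1},"r":{}}
    println!("{}", serde_json::to_string(&full)?);
    println!("{}", serde_json::to_string(&Reduced::from(full))?);
    Ok(())
}
```

Keeping the reduced record this small is what makes it practical to append one entry per commit to a long-lived history file, as the `write_json` changes below do.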
@@ -68,13 +70,13 @@ pub(crate) fn write_json(
     let output_dir = if branch.is_empty() {
         output_dir.to_path_buf()
     } else {
-        let folder = output_dir.join(branch);
+        let folder = output_dir.join(&branch);
         fs::create_dir_all(&folder)?;
         folder
     };
 
     // We make sure we are using the latest commit information in GitHub pages:
-    update_gh_pages_repo(output_dir.as_path(), verbose);
+    update_gh_pages_repo(output_dir.as_path(), &branch, verbose);
 
     if verbose != 0 {
         println!("Writing the results to {}...", output_dir.display());
@@ -86,27 +88,32 @@ pub(crate) fn write_json(
 
     let new_results = ResultInfo {
         commit: env::var("GITHUB_SHA").unwrap_or_default().into_boxed_str(),
-        test262_commit: get_test262_commit(test262_path)?,
-        results,
+        test262_commit: get_test262_commit(test262_path)
+            .context("could not get the test262 commit")?,
+        stats: results.stats,
+        versioned_results: results.versioned_stats,
+        results: results.suites,
     };
 
     let latest = BufWriter::new(fs::File::create(latest)?);
     serde_json::to_writer(latest, &new_results)?;
 
-    // Write the full list of results, retrieving the existing ones first.
+    // Write the full result history for "main"
+    if branch == "main" {
+        let all_path = output_dir.join(RESULTS_FILE_NAME);
 
-    let all_path = output_dir.join(RESULTS_FILE_NAME);
-
-    let mut all_results: Vec<ReducedResultInfo> = if all_path.exists() {
-        serde_json::from_reader(BufReader::new(fs::File::open(&all_path)?))?
-    } else {
-        Vec::new()
-    };
+        // We only keep history for the main branch
+        let mut all_results: Vec<ReducedResultInfo> = if all_path.is_file() {
+            serde_json::from_reader(BufReader::new(fs::File::open(&all_path)?))?
+        } else {
+            Vec::new()
+        };
 
-    all_results.push(new_results.into());
+        all_results.push(new_results.into());
 
-    let output = BufWriter::new(fs::File::create(&all_path)?);
-    serde_json::to_writer(output, &all_results)?;
+        let output = BufWriter::new(fs::File::create(&all_path)?);
+        serde_json::to_writer(output, &all_results)?;
+    }
 
     if verbose != 0 {
         println!("Results written correctly");
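
The history update above (only taken on `main`) is a plain read-modify-write cycle: load the existing JSON array if the file is already there, push the new reduced entry, and rewrite the file. A rough sketch of that pattern, using a hypothetical `Entry` record and helper name rather than the crate's own types:

```rust
use color_eyre::Result;
use serde::{Deserialize, Serialize};
use std::{
    fs,
    io::{BufReader, BufWriter},
    path::Path,
};

// Hypothetical reduced record standing in for ReducedResultInfo.
#[derive(Serialize, Deserialize)]
struct Entry {
    commit: String,
    passed: usize,
}

// Hypothetical helper: append one entry to a JSON history file.
fn append_history(path: &Path, new_entry: Entry) -> Result<()> {
    // Load the existing history if the file exists, otherwise start a fresh list.
    let mut all: Vec<Entry> = if path.is_file() {
        serde_json::from_reader(BufReader::new(fs::File::open(path)?))?
    } else {
        Vec::new()
    };

    all.push(new_entry);

    // Rewrite the whole file with the new entry appended.
    let out = BufWriter::new(fs::File::create(path)?);
    serde_json::to_writer(out, &all)?;
    Ok(())
}
```

Since the whole array is rewritten on every run, keeping only the reduced per-commit record (rather than the full per-suite results) is presumably what keeps this history file at a manageable size.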
@@ -125,17 +132,20 @@ fn get_test262_commit(test262_path: &Path) -> Result<Box<str>> {
 }
 
 /// Updates the GitHub pages repository by pulling latest changes before writing the new things.
-fn update_gh_pages_repo(path: &Path, verbose: u8) {
-    if env::var("GITHUB_REF").is_ok() {
-        use std::process::Command;
+fn update_gh_pages_repo(path: &Path, branch: &str, verbose: u8) {
+    use std::process::Command;
 
-        // We run the command to pull the gh-pages branch: git -C ../gh-pages/ pull origin
-        Command::new("git")
-            .args(["-C", "../gh-pages", "pull", "--ff-only"])
-            .output()
-            .expect("could not update GitHub Pages");
+    // We run the command to pull the gh-pages branch: git -C ../gh-pages/ pull --ff-only
+    if verbose != 0 {
+        println!("Cloning the Github Pages branch in ../gh-pages/...");
+    }
+    Command::new("git")
+        .args(["-C", "../gh-pages", "pull", "--ff-only"])
+        .output()
+        .expect("could not update GitHub Pages");
 
-        // Copy the full results file
+    if branch == "main" {
+        // Copy the full results file if in the main branch
         let from = Path::new("../gh-pages/test262/refs/heads/main/").join(RESULTS_FILE_NAME);
         let to = path.join(RESULTS_FILE_NAME);
 
@@ -147,8 +157,6 @@ fn update_gh_pages_repo(path: &Path, verbose: u8) {
             );
         }
 
-        // TO-DO: only copy the last result, not the whole file.
-
         fs::copy(from, to).expect("could not copy the main results file");
     }
 }
@@ -166,24 +174,24 @@ pub(crate) fn compare_results(base: &Path, new: &Path, markdown: bool) -> Result
     ))
     .wrap_err("could not read the new results")?;
 
-    let base_total = base_results.results.stats.total as isize;
-    let new_total = new_results.results.stats.total as isize;
+    let base_total = base_results.stats.total as isize;
+    let new_total = new_results.stats.total as isize;
     let total_diff = new_total - base_total;
 
-    let base_passed = base_results.results.stats.passed as isize;
-    let new_passed = new_results.results.stats.passed as isize;
+    let base_passed = base_results.stats.passed as isize;
+    let new_passed = new_results.stats.passed as isize;
     let passed_diff = new_passed - base_passed;
 
-    let base_ignored = base_results.results.stats.ignored as isize;
-    let new_ignored = new_results.results.stats.ignored as isize;
+    let base_ignored = base_results.stats.ignored as isize;
+    let new_ignored = new_results.stats.ignored as isize;
     let ignored_diff = new_ignored - base_ignored;
 
     let base_failed = base_total - base_passed - base_ignored;
     let new_failed = new_total - new_passed - new_ignored;
     let failed_diff = new_failed - base_failed;
 
-    let base_panics = base_results.results.stats.panic as isize;
-    let new_panics = new_results.results.stats.panic as isize;
+    let base_panics = base_results.stats.panic as isize;
+    let new_panics = new_results.stats.panic as isize;
     let panic_diff = new_panics - base_panics;
 
     let base_conformance = (base_passed as f64 / base_total as f64) * 100_f64;
@@ -406,8 +414,8 @@ impl ResultDiff {
 /// Compares a base and a new result and returns the list of differences.
 fn compute_result_diff(
     base: &Path,
-    base_result: &SuiteResult,
-    new_result: &SuiteResult,
+    base_result: &FxHashMap<Box<str>, SuiteResult>,
+    new_result: &FxHashMap<Box<str>, SuiteResult>,
 ) -> ResultDiff {
     let mut final_diff = ResultDiff::default();
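
With the per-suite results now stored as `FxHashMap<Box<str>, SuiteResult>`, diffing two runs essentially means looking up each base suite by name in the new map. A simplified, self-contained sketch of that lookup pattern (using a plain pass count as the value type instead of the crate's `SuiteResult`, and a hypothetical `diff_suites` helper):

```rust
use rustc_hash::FxHashMap;

// Simplified stand-in: the value is just a passed-test count per suite.
fn diff_suites(
    base: &FxHashMap<Box<str>, usize>,
    new: &FxHashMap<Box<str>, usize>,
) -> Vec<(Box<str>, isize)> {
    let mut diffs = Vec::new();
    for (name, base_passed) in base {
        // A suite missing from the new run counts as having lost all its passes.
        let new_passed = new.get(name).copied().unwrap_or(0);
        let delta = new_passed as isize - *base_passed as isize;
        if delta != 0 {
            diffs.push((name.clone(), delta));
        }
    }
    diffs
}

fn main() {
    let mut base: FxHashMap<Box<str>, usize> = FxHashMap::default();
    base.insert("array".into(), 10);
    let mut new: FxHashMap<Box<str>, usize> = FxHashMap::default();
    new.insert("array".into(), 12);
    // Prints [("array", 2)]
    println!("{:?}", diff_suites(&base, &new));
}
```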