@@ -34,11 +34,13 @@ impl BackgroundJob for DumpDb {
        let tarball = spawn_blocking(move || {
            let directory = DumpDirectory::create()?;

-            info!(path = ?directory.export_dir, "Begin exporting database");
+            info!("Begin exporting database");
            directory.populate(&database_url)?;

-            info!(path = ?directory.export_dir, "Creating tarball");
-            create_tarball(&directory.export_dir)
+            let export_dir = directory.path();
+            info!(path = ?export_dir, "Creating tarball");
+            let prefix = PathBuf::from(directory.timestamp.format("%Y-%m-%d-%H%M%S").to_string());
+            create_tarball(export_dir, &prefix)
        })
        .await?;
@@ -71,37 +73,22 @@ impl BackgroundJob for DumpDb {
/// make sure it gets deleted again even in the case of an error.
#[derive(Debug)]
pub struct DumpDirectory {
-    /// The temporary directory that contains the export directory. This is
-    /// allowing `dead_code` since we're only relying on the `Drop`
-    /// implementation to clean up the directory.
-    #[allow(dead_code)]
+    /// The temporary directory that contains the export directory.
    tempdir: tempfile::TempDir,
-
    pub timestamp: chrono::DateTime<chrono::Utc>,
-    pub export_dir: PathBuf,
}

impl DumpDirectory {
    pub fn create() -> anyhow::Result<Self> {
+        debug!("Creating database dump folder…");
        let tempdir = tempfile::tempdir()?;
-
        let timestamp = chrono::Utc::now();
-        let timestamp_str = timestamp.format("%Y-%m-%d-%H%M%S").to_string();
-        let export_dir = tempdir.path().join(timestamp_str);
-
-        debug!(?export_dir, "Creating database dump folder…");
-        fs::create_dir_all(&export_dir).with_context(|| {
-            format!(
-                "Failed to create export directory: {}",
-                export_dir.display()
-            )
-        })?;
-
-        Ok(Self {
-            tempdir,
-            timestamp,
-            export_dir,
-        })
+
+        Ok(Self { tempdir, timestamp })
+    }
+
+    pub fn path(&self) -> &Path {
+        self.tempdir.path()
    }

    pub fn populate(&self, database_url: &str) -> anyhow::Result<()> {
@@ -121,7 +108,7 @@ impl DumpDirectory {
    fn add_readme(&self) -> anyhow::Result<()> {
        use std::io::Write;

-        let path = self.export_dir.join("README.md");
+        let path = self.path().join("README.md");
        debug!(?path, "Writing README.md file…");
        let mut readme = File::create(path)?;
        readme.write_all(include_bytes!("dump_db/readme_for_tarball.md"))?;
@@ -139,15 +126,15 @@ impl DumpDirectory {
            crates_io_commit: dotenvy::var("HEROKU_SLUG_COMMIT")
                .unwrap_or_else(|_| "unknown".to_owned()),
        };
-        let path = self.export_dir.join("metadata.json");
+        let path = self.path().join("metadata.json");
        debug!(?path, "Writing metadata.json file…");
        let file = File::create(path)?;
        serde_json::to_writer_pretty(file, &metadata)?;
        Ok(())
    }

    pub fn dump_schema(&self, database_url: &str) -> anyhow::Result<()> {
-        let path = self.export_dir.join("schema.sql");
+        let path = self.path().join("schema.sql");
        debug!(?path, "Writing schema.sql file…");
        let schema_sql =
            File::create(&path).with_context(|| format!("Failed to create {}", path.display()))?;
@@ -175,14 +162,13 @@ impl DumpDirectory {

    pub fn dump_db(&self, database_url: &str) -> anyhow::Result<()> {
        debug!("Generating export.sql and import.sql files…");
-        let export_script = self.export_dir.join("export.sql");
-        let import_script = self.export_dir.join("import.sql");
+        let export_script = self.path().join("export.sql");
+        let import_script = self.path().join("import.sql");
        gen_scripts::gen_scripts(&export_script, &import_script)
            .context("Failed to generate export/import scripts")?;

        debug!("Filling data folder…");
-        fs::create_dir(self.export_dir.join("data"))
-            .context("Failed to create `data` directory")?;
+        fs::create_dir(self.path().join("data")).context("Failed to create `data` directory")?;

        run_psql(&export_script, database_url)
    }
@@ -216,16 +202,15 @@ pub fn run_psql(script: &Path, database_url: &str) -> anyhow::Result<()> {
    Ok(())
}

-fn create_tarball(export_dir: &Path) -> anyhow::Result<tempfile::NamedTempFile> {
+fn create_tarball(export_dir: &Path, prefix: &Path) -> anyhow::Result<tempfile::NamedTempFile> {
    debug!("Creating tarball file");
    let tempfile = tempfile::NamedTempFile::new()?;
    let encoder = flate2::write::GzEncoder::new(tempfile.as_file(), flate2::Compression::default());

    let mut archive = tar::Builder::new(encoder);

-    let tar_top_dir = PathBuf::from(export_dir.file_name().unwrap());
-    debug!(path = ?tar_top_dir, "Appending directory to tarball");
-    archive.append_dir(&tar_top_dir, export_dir)?;
+    debug!(path = ?prefix, "Appending directory to tarball");
+    archive.append_dir(prefix, export_dir)?;

    // Append readme, metadata, schemas.
    let mut paths = Vec::new();
@@ -239,7 +224,7 @@ fn create_tarball(export_dir: &Path) -> anyhow::Result<tempfile::NamedTempFile>
    // Sort paths to make the tarball deterministic.
    paths.sort();
    for (path, file_name) in paths {
-        let name_in_tar = tar_top_dir.join(file_name);
+        let name_in_tar = prefix.join(file_name);
        debug!(name = ?name_in_tar, "Appending file to tarball");
        archive.append_path_with_name(path, name_in_tar)?;
    }
@@ -251,13 +236,13 @@ fn create_tarball(export_dir: &Path) -> anyhow::Result<tempfile::NamedTempFile>
    let visibility_config = VisibilityConfig::get();
    let sorted_tables = visibility_config.topological_sort();

-    let path = tar_top_dir.join("data");
+    let path = prefix.join("data");
    debug!(?path, "Appending directory to tarball");
    archive.append_dir(path, export_dir.join("data"))?;
    for table in sorted_tables {
        let csv_path = export_dir.join("data").join(table).with_extension("csv");
        if csv_path.exists() {
-            let name_in_tar = tar_top_dir.join("data").join(table).with_extension("csv");
+            let name_in_tar = prefix.join("data").join(table).with_extension("csv");
            debug!(name = ?name_in_tar, "Appending file to tarball");
            archive.append_path_with_name(csv_path, name_in_tar)?;
        }
@@ -284,16 +269,15 @@ mod tests {
            .prefix("DumpTarball")
            .tempdir()
            .unwrap();
-        let p = tempdir.path().join("0000-00-00");
+        let p = tempdir.path();

-        fs::create_dir(&p).unwrap();
        fs::write(p.join("README.md"), "# crates.io Database Dump\n").unwrap();
        fs::create_dir(p.join("data")).unwrap();
        fs::write(p.join("data").join("crates.csv"), "").unwrap();
        fs::write(p.join("data").join("crate_owners.csv"), "").unwrap();
        fs::write(p.join("data").join("users.csv"), "").unwrap();

-        let tarball = create_tarball(&p).unwrap();
+        let tarball = create_tarball(p, &PathBuf::from("0000-00-00")).unwrap();
        let gz = GzDecoder::new(File::open(tarball.path()).unwrap());
        let mut tar = Archive::new(gz);